Posted to commits@hbase.apache.org by gi...@apache.org on 2018/05/18 14:47:44 UTC

[01/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 4a5d182c5 -> ead846d71


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSUtils.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSUtils.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSUtils.html
index 1cb08fa..7def7fd 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSUtils.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFSUtils.html
@@ -34,538 +34,565 @@
 <span class="sourceLineNo">026</span><a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.io.File;<a name="line.27"></a>
 <span class="sourceLineNo">028</span>import java.io.IOException;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.util.Random;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.util.UUID;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.conf.Configuration;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FileStatus;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FileSystem;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.Path;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.HConstants;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.testclassification.MiscTests;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hdfs.DFSConfigKeys;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hdfs.DFSTestUtil;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hdfs.MiniDFSCluster;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.junit.Before;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.junit.ClassRule;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.junit.Test;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.junit.experimental.categories.Category;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.slf4j.Logger;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.slf4j.LoggerFactory;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>/**<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * Test {@link FSUtils}.<a name="line.57"></a>
-<span class="sourceLineNo">058</span> */<a name="line.58"></a>
-<span class="sourceLineNo">059</span>@Category({MiscTests.class, MediumTests.class})<a name="line.59"></a>
-<span class="sourceLineNo">060</span>public class TestFSUtils {<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  @ClassRule<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.63"></a>
-<span class="sourceLineNo">064</span>      HBaseClassTestRule.forClass(TestFSUtils.class);<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final Logger LOG = LoggerFactory.getLogger(TestFSUtils.class);<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private HBaseTestingUtility htu;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  private FileSystem fs;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  private Configuration conf;<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  @Before<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public void setUp() throws IOException {<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    htu = new HBaseTestingUtility();<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    fs = htu.getTestFileSystem();<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    conf = htu.getConfiguration();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  }<a name="line.77"></a>
-<span class="sourceLineNo">078</span><a name="line.78"></a>
-<span class="sourceLineNo">079</span>  @Test public void testIsHDFS() throws Exception {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    assertFalse(FSUtils.isHDFS(conf));<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    MiniDFSCluster cluster = null;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    try {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>      cluster = htu.startMiniDFSCluster(1);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      assertTrue(FSUtils.isHDFS(conf));<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    } finally {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      if (cluster != null) cluster.shutdown();<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    }<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>  private void WriteDataToHDFS(FileSystem fs, Path file, int dataSize)<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    throws Exception {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    FSDataOutputStream out = fs.create(file);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    byte [] data = new byte[dataSize];<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    out.write(data, 0, dataSize);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    out.close();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  @Test public void testcomputeHDFSBlocksDistribution() throws Exception {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    final int DEFAULT_BLOCK_SIZE = 1024;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    conf.setLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    MiniDFSCluster cluster = null;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    Path testFile = null;<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>    try {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      // set up a cluster with 3 nodes<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      String hosts[] = new String[] { "host1", "host2", "host3" };<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      cluster = htu.startMiniDFSCluster(hosts);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      cluster.waitActive();<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      FileSystem fs = cluster.getFileSystem();<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>      // create a file with two blocks<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      testFile = new Path("/test1.txt");<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      WriteDataToHDFS(fs, testFile, 2*DEFAULT_BLOCK_SIZE);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>      // given the default replication factor is 3, the same as the number of<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      // datanodes; the locality index for each host should be 100%,<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      // or getWeight for each host should be the same as getUniqueBlocksWeights<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      final long maxTime = System.currentTimeMillis() + 2000;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      boolean ok;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      do {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>        ok = true;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        FileStatus status = fs.getFileStatus(testFile);<a name="line.122"></a>
-<span class="sourceLineNo">123</span>        HDFSBlocksDistribution blocksDistribution =<a name="line.123"></a>
-<span class="sourceLineNo">124</span>          FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());<a name="line.124"></a>
-<span class="sourceLineNo">125</span>        long uniqueBlocksTotalWeight =<a name="line.125"></a>
-<span class="sourceLineNo">126</span>          blocksDistribution.getUniqueBlocksTotalWeight();<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        for (String host : hosts) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          long weight = blocksDistribution.getWeight(host);<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          ok = (ok &amp;&amp; uniqueBlocksTotalWeight == weight);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        }<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      } while (!ok &amp;&amp; System.currentTimeMillis() &lt; maxTime);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      assertTrue(ok);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      } finally {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      htu.shutdownMiniDFSCluster();<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">029</span>import java.util.List;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.Random;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.UUID;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.conf.Configuration;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FileStatus;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileSystem;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.Path;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HConstants;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.testclassification.MiscTests;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hdfs.DFSConfigKeys;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hdfs.DFSTestUtil;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hdfs.MiniDFSCluster;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.junit.Before;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.junit.ClassRule;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.junit.Test;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.junit.experimental.categories.Category;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.slf4j.Logger;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.slf4j.LoggerFactory;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>/**<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Test {@link FSUtils}.<a name="line.58"></a>
+<span class="sourceLineNo">059</span> */<a name="line.59"></a>
+<span class="sourceLineNo">060</span>@Category({MiscTests.class, MediumTests.class})<a name="line.60"></a>
+<span class="sourceLineNo">061</span>public class TestFSUtils {<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  @ClassRule<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.64"></a>
+<span class="sourceLineNo">065</span>      HBaseClassTestRule.forClass(TestFSUtils.class);<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  private static final Logger LOG = LoggerFactory.getLogger(TestFSUtils.class);<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>  private HBaseTestingUtility htu;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private FileSystem fs;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  private Configuration conf;<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  @Before<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  public void setUp() throws IOException {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    htu = new HBaseTestingUtility();<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    fs = htu.getTestFileSystem();<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    conf = htu.getConfiguration();<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  @Test public void testIsHDFS() throws Exception {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    assertFalse(FSUtils.isHDFS(conf));<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    MiniDFSCluster cluster = null;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    try {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>      cluster = htu.startMiniDFSCluster(1);<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      assertTrue(FSUtils.isHDFS(conf));<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    } finally {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      if (cluster != null) cluster.shutdown();<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    }<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  }<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>  private void WriteDataToHDFS(FileSystem fs, Path file, int dataSize)<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    throws Exception {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    FSDataOutputStream out = fs.create(file);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    byte [] data = new byte[dataSize];<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    out.write(data, 0, dataSize);<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    out.close();<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  }<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  @Test public void testcomputeHDFSBlocksDistribution() throws Exception {<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    final int DEFAULT_BLOCK_SIZE = 1024;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    conf.setLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    MiniDFSCluster cluster = null;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    Path testFile = null;<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>    try {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>      // set up a cluster with 3 nodes<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      String hosts[] = new String[] { "host1", "host2", "host3" };<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      cluster = htu.startMiniDFSCluster(hosts);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      cluster.waitActive();<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      FileSystem fs = cluster.getFileSystem();<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>      // create a file with two blocks<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      testFile = new Path("/test1.txt");<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      WriteDataToHDFS(fs, testFile, 2*DEFAULT_BLOCK_SIZE);<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>      // given the default replication factor is 3, the same as the number of<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      // datanodes; the locality index for each host should be 100%,<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      // or getWeight for each host should be the same as getUniqueBlocksWeights<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      final long maxTime = System.currentTimeMillis() + 2000;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>      boolean ok;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>      do {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>        ok = true;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        FileStatus status = fs.getFileStatus(testFile);<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        HDFSBlocksDistribution blocksDistribution =<a name="line.124"></a>
+<span class="sourceLineNo">125</span>          FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        long uniqueBlocksTotalWeight =<a name="line.126"></a>
+<span class="sourceLineNo">127</span>          blocksDistribution.getUniqueBlocksTotalWeight();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        for (String host : hosts) {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>          long weight = blocksDistribution.getWeight(host);<a name="line.129"></a>
+<span class="sourceLineNo">130</span>          ok = (ok &amp;&amp; uniqueBlocksTotalWeight == weight);<a name="line.130"></a>
+<span class="sourceLineNo">131</span>        }<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      } while (!ok &amp;&amp; System.currentTimeMillis() &lt; maxTime);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      assertTrue(ok);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>      } finally {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      htu.shutdownMiniDFSCluster();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
 <span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    try {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      // set up a cluster with 4 nodes<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      String hosts[] = new String[] { "host1", "host2", "host3", "host4" };<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      cluster = htu.startMiniDFSCluster(hosts);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      cluster.waitActive();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      FileSystem fs = cluster.getFileSystem();<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>      // create a file with three blocks<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      testFile = new Path("/test2.txt");<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      WriteDataToHDFS(fs, testFile, 3*DEFAULT_BLOCK_SIZE);<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>      // given the default replication factor is 3, we will have total of 9<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      // replica of blocks; thus the host with the highest weight should have<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      // weight == 3 * DEFAULT_BLOCK_SIZE<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      final long maxTime = System.currentTimeMillis() + 2000;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      long weight;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      long uniqueBlocksTotalWeight;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      do {<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        FileStatus status = fs.getFileStatus(testFile);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        HDFSBlocksDistribution blocksDistribution =<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        uniqueBlocksTotalWeight = blocksDistribution.getUniqueBlocksTotalWeight();<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>        String tophost = blocksDistribution.getTopHosts().get(0);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        weight = blocksDistribution.getWeight(tophost);<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>        // NameNode is informed asynchronously, so we may have a delay. See HBASE-6175<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      } while (uniqueBlocksTotalWeight != weight &amp;&amp; System.currentTimeMillis() &lt; maxTime);<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      assertTrue(uniqueBlocksTotalWeight == weight);<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>    } finally {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      htu.shutdownMiniDFSCluster();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>    try {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      // set up a cluster with 4 nodes<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      String hosts[] = new String[] { "host1", "host2", "host3", "host4" };<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      cluster = htu.startMiniDFSCluster(hosts);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      cluster.waitActive();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>      FileSystem fs = cluster.getFileSystem();<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>      // create a file with three blocks<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      testFile = new Path("/test2.txt");<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      WriteDataToHDFS(fs, testFile, 3*DEFAULT_BLOCK_SIZE);<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>      // given the default replication factor is 3, we will have total of 9<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      // replica of blocks; thus the host with the highest weight should have<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      // weight == 3 * DEFAULT_BLOCK_SIZE<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      final long maxTime = System.currentTimeMillis() + 2000;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      long weight;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      long uniqueBlocksTotalWeight;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      do {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>        FileStatus status = fs.getFileStatus(testFile);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>        HDFSBlocksDistribution blocksDistribution =<a name="line.158"></a>
+<span class="sourceLineNo">159</span>          FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        uniqueBlocksTotalWeight = blocksDistribution.getUniqueBlocksTotalWeight();<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>        String tophost = blocksDistribution.getTopHosts().get(0);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>        weight = blocksDistribution.getWeight(tophost);<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>        // NameNode is informed asynchronously, so we may have a delay. See HBASE-6175<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      } while (uniqueBlocksTotalWeight != weight &amp;&amp; System.currentTimeMillis() &lt; maxTime);<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      assertTrue(uniqueBlocksTotalWeight == weight);<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>    } finally {<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      htu.shutdownMiniDFSCluster();<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    try {<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      // set up a cluster with 4 nodes<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      String hosts[] = new String[] { "host1", "host2", "host3", "host4" };<a name="line.175"></a>
-<span class="sourceLineNo">176</span>      cluster = htu.startMiniDFSCluster(hosts);<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      cluster.waitActive();<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      FileSystem fs = cluster.getFileSystem();<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>      // create a file with one block<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      testFile = new Path("/test3.txt");<a name="line.181"></a>
-<span class="sourceLineNo">182</span>      WriteDataToHDFS(fs, testFile, DEFAULT_BLOCK_SIZE);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      // given the default replication factor is 3, we will have total of 3<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      // replica of blocks; thus there is one host without weight<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      final long maxTime = System.currentTimeMillis() + 2000;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      HDFSBlocksDistribution blocksDistribution;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      do {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>        FileStatus status = fs.getFileStatus(testFile);<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        blocksDistribution = FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        // NameNode is informed asynchronously, so we may have a delay. See HBASE-6175<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      while (blocksDistribution.getTopHosts().size() != 3 &amp;&amp; System.currentTimeMillis() &lt; maxTime);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      assertEquals("Wrong number of hosts distributing blocks.", 3,<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        blocksDistribution.getTopHosts().size());<a name="line.195"></a>
-<span class="sourceLineNo">196</span>    } finally {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      htu.shutdownMiniDFSCluster();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    }<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  @Test<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public void testVersion() throws DeserializationException, IOException {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    final Path rootdir = htu.getDataTestDir();<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    final FileSystem fs = rootdir.getFileSystem(conf);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    assertNull(FSUtils.getVersion(fs, rootdir));<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    // Write out old format version file.  See if we can read it in and convert.<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    FSDataOutputStream s = fs.create(versionFile);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    final String version = HConstants.FILE_SYSTEM_VERSION;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    s.writeUTF(version);<a name="line.210"></a>
-<span class="sourceLineNo">211</span>    s.close();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    assertTrue(fs.exists(versionFile));<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    FileStatus [] status = fs.listStatus(versionFile);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    assertNotNull(status);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    assertTrue(status.length &gt; 0);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    String newVersion = FSUtils.getVersion(fs, rootdir);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    assertEquals(version.length(), newVersion.length());<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    assertEquals(version, newVersion);<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    // File will have been converted. Exercise the pb format<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    assertEquals(version, FSUtils.getVersion(fs, rootdir));<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    FSUtils.checkVersion(fs, rootdir, true);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  @Test<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  public void testPermMask() throws Exception {<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    final Path rootdir = htu.getDataTestDir();<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    final FileSystem fs = rootdir.getFileSystem(conf);<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    // default fs permission<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    FsPermission defaultFsPerm = FSUtils.getFilePermissions(fs, conf,<a name="line.229"></a>
-<span class="sourceLineNo">230</span>        HConstants.DATA_FILE_UMASK_KEY);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    // 'hbase.data.umask.enable' is false. We will get default fs permission.<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    assertEquals(FsPermission.getFileDefault(), defaultFsPerm);<a name="line.232"></a>
-<span class="sourceLineNo">233</span><a name="line.233"></a>
-<span class="sourceLineNo">234</span>    conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    // first check that we don't crash if we don't have perms set<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    FsPermission defaultStartPerm = FSUtils.getFilePermissions(fs, conf,<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        HConstants.DATA_FILE_UMASK_KEY);<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    // default 'hbase.data.umask'is 000, and this umask will be used when<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    // 'hbase.data.umask.enable' is true.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>    // Therefore we will not get the real fs default in this case.<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    // Instead we will get the starting point FULL_RWX_PERMISSIONS<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    assertEquals(new FsPermission(FSUtils.FULL_RWX_PERMISSIONS), defaultStartPerm);<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    conf.setStrings(HConstants.DATA_FILE_UMASK_KEY, "077");<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    // now check that we get the right perms<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    FsPermission filePerm = FSUtils.getFilePermissions(fs, conf,<a name="line.246"></a>
-<span class="sourceLineNo">247</span>        HConstants.DATA_FILE_UMASK_KEY);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    assertEquals(new FsPermission("700"), filePerm);<a name="line.248"></a>
-<span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>    // then that the correct file is created<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    Path p = new Path("target" + File.separator + UUID.randomUUID().toString());<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    try {<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      FSDataOutputStream out = FSUtils.create(conf, fs, p, filePerm, null);<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      out.close();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      FileStatus stat = fs.getFileStatus(p);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      assertEquals(new FsPermission("700"), stat.getPermission());<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      // and then cleanup<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } finally {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      fs.delete(p, true);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span>  }<a name="line.261"></a>
-<span class="sourceLineNo">262</span><a name="line.262"></a>
-<span class="sourceLineNo">263</span>  @Test<a name="line.263"></a>
-<span class="sourceLineNo">264</span>  public void testDeleteAndExists() throws Exception {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    final Path rootdir = htu.getDataTestDir();<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    final FileSystem fs = rootdir.getFileSystem(conf);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    // then that the correct file is created<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    String file = UUID.randomUUID().toString();<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      FSDataOutputStream out = FSUtils.create(conf, fs, p, perms, null);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      out.close();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      assertTrue("The created file should be present", FSUtils.isExists(fs, p));<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      // delete the file with recursion as false. Only the file will be deleted.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      FSUtils.delete(fs, p, false);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Create another file<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FSDataOutputStream out1 = FSUtils.create(conf, fs, p1, perms, null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      out1.close();<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // delete the file with recursion as false. Still the file only will be deleted<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      FSUtils.delete(fs, p1, true);<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      assertFalse("The created file should be present", FSUtils.isExists(fs, p1));<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      // and then cleanup<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    } finally {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      FSUtils.delete(fs, p, true);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      FSUtils.delete(fs, p1, true);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    }<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  }<a name="line.290"></a>
-<span class="sourceLineNo">291</span><a name="line.291"></a>
-<span class="sourceLineNo">292</span>  @Test<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  public void testFilteredStatusDoesNotThrowOnNotFound() throws Exception {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    try {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>      assertNull(FSUtils.listStatusWithStatusFilter(cluster.getFileSystem(), new Path("definitely/doesn't/exist"), null));<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    } finally {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      cluster.shutdown();<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    }<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  }<a name="line.301"></a>
-<span class="sourceLineNo">302</span><a name="line.302"></a>
-<span class="sourceLineNo">303</span>  @Test<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  public void testRenameAndSetModifyTime() throws Exception {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    assertTrue(FSUtils.isHDFS(conf));<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>    FileSystem fs = FileSystem.get(conf);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>    String file = UUID.randomUUID().toString();<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    Path p = new Path(testDir, file);<a name="line.312"></a>
-<span class="sourceLineNo">313</span><a name="line.313"></a>
-<span class="sourceLineNo">314</span>    FSDataOutputStream out = fs.create(p);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    out.close();<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue("The created file should be present", FSUtils.isExists(fs, p));<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>    long expect = System.currentTimeMillis() + 1000;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    assertNotEquals(expect, fs.getFileStatus(p).getModificationTime());<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    ManualEnvironmentEdge mockEnv = new ManualEnvironmentEdge();<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    mockEnv.setValue(expect);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    EnvironmentEdgeManager.injectEdge(mockEnv);<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    try {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      String dstFile = UUID.randomUUID().toString();<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      Path dst = new Path(testDir , dstFile);<a name="line.326"></a>
-<span class="sourceLineNo">327</span><a name="line.327"></a>
-<span class="sourceLineNo">328</span>      assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst));<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      assertFalse("The moved file should not be present", FSUtils.isExists(fs, p));<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      assertTrue("The dst file should be present", FSUtils.isExists(fs, dst));<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>      assertEquals(expect, fs.getFileStatus(dst).getModificationTime());<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      cluster.shutdown();<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    } finally {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      EnvironmentEdgeManager.reset();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  @Test<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  public void testSetStoragePolicyDefault() throws Exception {<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    verifyFileInDirWithStoragePolicy(HConstants.DEFAULT_WAL_STORAGE_POLICY);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  }<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  /* might log a warning, but still work. (always warning on Hadoop &lt; 2.6.0) */<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  @Test<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  public void testSetStoragePolicyValidButMaybeNotPresent() throws Exception {<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    verifyFileInDirWithStoragePolicy("ALL_SSD");<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  }<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /* should log a warning, but still work. (different warning on Hadoop &lt; 2.6.0) */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  @Test<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  public void testSetStoragePolicyInvalid() throws Exception {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    verifyFileInDirWithStoragePolicy("1772");<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  // Here instead of TestCommonFSUtils because we need a minicluster<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private void verifyFileInDirWithStoragePolicy(final String policy) throws Exception {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    conf.set(HConstants.WAL_STORAGE_POLICY, policy);<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    try {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      assertTrue(FSUtils.isHDFS(conf));<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>      FileSystem fs = FileSystem.get(conf);<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      fs.mkdirs(testDir);<a name="line.366"></a>
-<span class="sourceLineNo">367</span><a name="line.367"></a>
-<span class="sourceLineNo">368</span>      FSUtils.setStoragePolicy(fs, conf, testDir, HConstants.WAL_STORAGE_POLICY,<a name="line.368"></a>
-<span class="sourceLineNo">369</span>          HConstants.DEFAULT_WAL_STORAGE_POLICY);<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      String file = UUID.randomUUID().toString();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      Path p = new Path(testDir, file);<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      WriteDataToHDFS(fs, p, 4096);<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      // will assert existance before deleting.<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      cleanupFile(fs, testDir);<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } finally {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      cluster.shutdown();<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
-<span class="sourceLineNo">380</span><a name="line.380"></a>
-<span class="sourceLineNo">381</span>  /**<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * Ugly test that ensures we can get at the hedged read counters in dfsclient.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Does a bit of preading with hedged reads enabled using code taken from hdfs TestPread.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @throws Exception<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   */<a name="line.385"></a>
-<span class="sourceLineNo">386</span>  @Test public void testDFSHedgedReadMetrics() throws Exception {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    // Enable hedged reads and set it so the threshold is really low.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    // Most of this test is taken from HDFS, from TestPread.<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    conf.setInt(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THREADPOOL_SIZE, 5);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    conf.setLong(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THRESHOLD_MILLIS, 0);<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 4096);<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    conf.setLong(DFSConfigKeys.DFS_CLIENT_READ_PREFETCH_SIZE_KEY, 4096);<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    // Set short retry timeouts so this test runs faster<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 0);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    conf.setBoolean("dfs.datanode.transferTo.allowed", false);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    // Get the metrics.  Should be empty.<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    DFSHedgedReadMetrics metrics = FSUtils.getDFSHedgedReadMetrics(conf);<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    assertEquals(0, metrics.getHedgedReadOps());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    FileSystem fileSys = cluster.getFileSystem();<a name="line.400"></a>
-<span class="sourceLineNo">401</span>    try {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      Path p = new Path("preadtest.dat");<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      // We need &gt; 1 blocks to test out the hedged reads.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>      DFSTestUtil.createFile(fileSys, p, 12 * blockSize, 12 * blockSize,<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        blockSize, (short) 3, seed);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      pReadFile(fileSys, p);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      cleanupFile(fileSys, p);<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      assertTrue(metrics.getHedgedReadOps() &gt; 0);<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    } finally {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      fileSys.close();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      cluster.shutdown();<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    }<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  }<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>  // Below is taken from TestPread over in HDFS.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  static final int blockSize = 4096;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  static final long seed = 0xDEADBEEFL;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private void pReadFile(FileSystem fileSys, Path name) throws IOException {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    FSDataInputStream stm = fileSys.open(name);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    byte[] expected = new byte[12 * blockSize];<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    Random rand = new Random(seed);<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    rand.nextBytes(expected);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    // do a sanity check. Read first 4K bytes<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    byte[] actual = new byte[4096];<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    stm.readFully(actual);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    checkAndEraseData(actual, 0, expected, "Read Sanity Test");<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    // now do a pread for the first 8K bytes<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    actual = new byte[8192];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    doPread(stm, 0L, actual, 0, 8192);<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    checkAndEraseData(actual, 0, expected, "Pread Test 1");<a name="line.431"></a>
-<span class="sourceLineNo">432</span>    // Now check to see if the normal read returns 4K-8K byte range<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    actual = new byte[4096];<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    stm.readFully(actual);<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    checkAndEraseData(actual, 4096, expected, "Pread Test 2");<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    // Now see if we can cross a single block boundary successfully<a name="line.436"></a>
-<span class="sourceLineNo">437</span>    // read 4K bytes from blockSize - 2K offset<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    stm.readFully(blockSize - 2048, actual, 0, 4096);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>    checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 3");<a name="line.439"></a>
-<span class="sourceLineNo">440</span>    // now see if we can cross two block boundaries successfully<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    // read blockSize + 4K bytes from blockSize - 2K offset<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    actual = new byte[blockSize + 4096];<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    stm.readFully(blockSize - 2048, actual);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 4");<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    // now see if we can cross two block boundaries that are not cached<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    // read blockSize + 4K bytes from 10*blockSize - 2K offset<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    actual = new byte[blockSize + 4096];<a name="line.447"></a>
-<span class="sourceLineNo">448</span>    stm.readFully(10 * blockSize - 2048, actual);<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    checkAndEraseData(actual, (10 * blockSize - 2048), expected, "Pread Test 5");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    // now check that even after all these preads, we can still read<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    // bytes 8K-12K<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    actual = new byte[4096];<a name="line.452"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>    try {<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      // set up a cluster with 4 nodes<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      String hosts[] = new String[] { "host1", "host2", "host3", "host4" };<a name="line.176"></a>
+<span class="sourceLineNo">177</span>      cluster = htu.startMiniDFSCluster(hosts);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      cluster.waitActive();<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      FileSystem fs = cluster.getFileSystem();<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>      // create a file with one block<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      testFile = new Path("/test3.txt");<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      WriteDataToHDFS(fs, testFile, DEFAULT_BLOCK_SIZE);<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>      // given the default replication factor is 3, we will have total of 3<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      // replica of blocks; thus there is one host without weight<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      final long maxTime = System.currentTimeMillis() + 2000;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      HDFSBlocksDistribution blocksDistribution;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      do {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>        FileStatus status = fs.getFileStatus(testFile);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        blocksDistribution = FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        // NameNode is informed asynchronously, so we may have a delay. See HBASE-6175<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      }<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      while (blocksDistribution.getTopHosts().size() != 3 &amp;&amp; System.currentTimeMillis() &lt; maxTime);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      assertEquals("Wrong number of hosts distributing blocks.", 3,<a name="line.195"></a>
+<span class="sourceLineNo">196</span>        blocksDistribution.getTopHosts().size());<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    } finally {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      htu.shutdownMiniDFSCluster();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    }<a name="line.199"></a>
+<span class="sourceLineNo">200</span>  }<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>  @Test<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  public void testVersion() throws DeserializationException, IOException {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    final Path rootdir = htu.getDataTestDir();<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    final FileSystem fs = rootdir.getFileSystem(conf);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    assertNull(FSUtils.getVersion(fs, rootdir));<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    // Write out old format version file.  See if we can read it in and convert.<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    FSDataOutputStream s = fs.create(versionFile);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    final String version = HConstants.FILE_SYSTEM_VERSION;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    s.writeUTF(version);<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    s.close();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    assertTrue(fs.exists(versionFile));<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    FileStatus [] status = fs.listStatus(versionFile);<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    assertNotNull(status);<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    assertTrue(status.length &gt; 0);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    String newVersion = FSUtils.getVersion(fs, rootdir);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    assertEquals(version.length(), newVersion.length());<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    assertEquals(version, newVersion);<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    // File will have been converted. Exercise the pb format<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    assertEquals(version, FSUtils.getVersion(fs, rootdir));<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    FSUtils.checkVersion(fs, rootdir, true);<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  @Test<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  public void testPermMask() throws Exception {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    final Path rootdir = htu.getDataTestDir();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    final FileSystem fs = rootdir.getFileSystem(conf);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    // default fs permission<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    FsPermission defaultFsPerm = FSUtils.getFilePermissions(fs, conf,<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        HConstants.DATA_FILE_UMASK_KEY);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    // 'hbase.data.umask.enable' is false. We will get default fs permission.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    assertEquals(FsPermission.getFileDefault(), defaultFsPerm);<a name="line.233"></a>
+<span class="sourceLineNo">234</span><a name="line.234"></a>
+<span class="sourceLineNo">235</span>    conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    // first check that we don't crash if we don't have perms set<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    FsPermission defaultStartPerm = FSUtils.getFilePermissions(fs, conf,<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        HConstants.DATA_FILE_UMASK_KEY);<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    // default 'hbase.data.umask'is 000, and this umask will be used when<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    // 'hbase.data.umask.enable' is true.<a name="line.240"></a>
+<span class="sourceLineNo">241</span>    // Therefore we will not get the real fs default in this case.<a name="line.241"></a>
+<span class="sourceLineNo">242</span>    // Instead we will get the starting point FULL_RWX_PERMISSIONS<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    assertEquals(new FsPermission(FSUtils.FULL_RWX_PERMISSIONS), defaultStartPerm);<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    conf.setStrings(HConstants.DATA_FILE_UMASK_KEY, "077");<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    // now check that we get the right perms<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    FsPermission filePerm = FSUtils.getFilePermissions(fs, conf,<a name="line.247"></a>
+<span class="sourceLineNo">248</span>        HConstants.DATA_FILE_UMASK_KEY);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    assertEquals(new FsPermission("700"), filePerm);<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>    // then that the correct file is created<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    Path p = new Path("target" + File.separator + UUID.randomUUID().toString());<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    try {<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      FSDataOutputStream out = FSUtils.create(conf, fs, p, filePerm, null);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      out.close();<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      FileStatus stat = fs.getFileStatus(p);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      assertEquals(new FsPermission("700"), stat.getPermission());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      // and then cleanup<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    } finally {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      fs.delete(p, true);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  }<a name="line.262"></a>
+<span class="sourceLineNo">263</span><a name="line.263"></a>
+<span class="sourceLineNo">264</span>  @Test<a name="line.264"></a>
+<span class="sourceLineNo">265</span>  public void testDeleteAndExists() throws Exception {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    final Path rootdir = htu.getDataTestDir();<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    final FileSystem fs = rootdir.getFileSystem(conf);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    // then that the correct file is created<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    String file = UUID.randomUUID().toString();<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    try {<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      FSDataOutputStream out = FSUtils.create(conf, fs, p, perms, null);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      out.close();<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      assertTrue("The created file should be present", FSUtils.isExists(fs, p));<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      // delete the file with recursion as false. Only the file will be deleted.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      FSUtils.delete(fs, p, false);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      // Create another file<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      FSDataOutputStream out1 = FSUtils.create(conf, fs, p1, perms, null);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      out1.close();<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      // delete the file with recursion as false. Still the file only will be deleted<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      FSUtils.delete(fs, p1, true);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      assertFalse("The created file should be present", FSUtils.isExists(fs, p1));<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      // and then cleanup<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    } finally {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FSUtils.delete(fs, p, true);<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      FSUtils.delete(fs, p1, true);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    }<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  public void testFilteredStatusDoesNotThrowOnNotFound() throws Exception {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    try {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      assertNull(FSUtils.listStatusWithStatusFilter(cluster.getFileSystem(), new Path("definitely/doesn't/exist"), null));<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    } finally {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      cluster.shutdown();<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    }<a name="line.300"></a>
+<span class="sourceLineNo">301</span><a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  @Test<a name="line.304"></a>
+<span class="sourceLineNo">305</span>  public void testRenameAndSetModifyTime() throws Exception {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    assertTrue(FSUtils.isHDFS(conf));<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>    FileSystem fs = FileSystem.get(conf);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>    String file = UUID.randomUUID().toString();<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    Path p = new Path(testDir, file);<a name="line.313"></a>
+<span class="sourceLineNo">314</span><a name="line.314"></a>
+<span class="sourceLineNo">315</span>    FSDataOutputStream out = fs.create(p);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    assertTrue("The created file should be present", FSUtils.isExists(fs, p));<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>    long expect = System.currentTimeMillis() + 1000;<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    assertNotEquals(expect, fs.getFileStatus(p).getModificationTime());<a name="line.320"></a>
+<span class="sourceLineNo">321</span><a name="line.321"></a>
+<span class="sourceLineNo">322</span>    ManualEnvironmentEdge mockEnv = new ManualEnvironmentEdge();<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    mockEnv.setValue(expect);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    EnvironmentEdgeManager.injectEdge(mockEnv);<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    try {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      String dstFile = UUID.randomUUID().toString();<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      Path dst = new Path(testDir , dstFile);<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>      assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst));<a name="line.329"></a>
+<span class="sourceLineNo">330</span>      assertFalse("The moved file should not be present", FSUtils.isExists(fs, p));<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      assertTrue("The dst file should be present", FSUtils.isExists(fs, dst));<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>      assertEquals(expect, fs.getFileStatus(dst).getModificationTime());<a name="line.333"></a>
+<span class="sourceLineNo">334</span>      cluster.shutdown();<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    } finally {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      EnvironmentEdgeManager.reset();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    }<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  @Test<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  public void testSetStoragePolicyDefault() throws Exception {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    verifyFileInDirWithStoragePolicy(HConstants.DEFAULT_WAL_STORAGE_POLICY);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  }<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  /* might log a warning, but still work. (always warning on Hadoop &lt; 2.6.0) */<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  @Test<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  public void testSetStoragePolicyValidButMaybeNotPresent() throws Exception {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    verifyFileInDirWithStoragePolicy("ALL_SSD");<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  /* should log a warning, but still work. (different warning on Hadoop &lt; 2.6.0) */<a name="line.351"></a>
+<span class="sourceLineNo">352</span>  @Test<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  public void testSetStoragePolicyInvalid() throws Exception {<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    verifyFileInDirWithStoragePolicy("1772");<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  }<a name="line.355"></a>
+<span class="sourceLineNo">356</span><a name="line.356"></a>
+<span class="sourceLineNo">357</span>  // Here instead of TestCommonFSUtils because we need a minicluster<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  private void verifyFileInDirWithStoragePolicy(final String policy) throws Exception {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    conf.set(HConstants.WAL_STORAGE_POLICY, policy);<a name="line.359"></a>
+<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">361</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    try {<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      assertTrue(FSUtils.isHDFS(conf));<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>      FileSystem fs = FileSystem.get(conf);<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      fs.mkdirs(testDir);<a name="line.367"></a>
+<span class="sourceLineNo">368</span><a name="line.368"></a>
+<span class="sourceLineNo">369</span>      FSUtils.setStoragePolicy(fs, conf, testDir, HConstants.WAL_STORAGE_POLICY,<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          HConstants.DEFAULT_WAL_STORAGE_POLICY);<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>      String file = UUID.randomUUID().toString();<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      Path p = new Path(testDir, file);<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      WriteDataToHDFS(fs, p, 4096);<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      // will assert existance before deleting.<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      cleanupFile(fs, testDir);<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    } finally {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>      cluster.shutdown();<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * Ugly test that ensures we can get at the hedged read counters in dfsclient.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Does a bit of preading with hedged reads enabled using code taken from hdfs TestPread.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * @throws Exception<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  @Test public void testDFSHedgedReadMetrics() throws Exception {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    // Enable hedged reads and set it so the threshold is really low.<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    // Most of this test is taken from HDFS, from TestPread.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    conf.setInt(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THREADPOOL_SIZE, 5);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    conf.setLong(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THRESHOLD_MILLIS, 0);<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 4096);<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    conf.setLong(DFSConfigKeys.DFS_CLIENT_READ_PREFETCH_SIZE_KEY, 4096);<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    // Set short retry timeouts so this test runs faster<a name="line.394"></a>
+<span class="sourceLineNo">395</span>    conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 0);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    conf.setBoolean("dfs.datanode.transferTo.allowed", false);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    // Get the metrics.  Should be empty.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    DFSHedgedReadMetrics metrics = FSUtils.getDFSHedgedReadMetrics(conf);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    assertEquals(0, metrics.getHedgedReadOps());<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    FileSystem fileSys = cluster.getFileSystem();<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    try {<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      Path p = new Path("preadtest.dat");<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      // We need &gt; 1 blocks to test out the hedged reads.<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      DFSTestUtil.createFile(fileSys, p, 12 * blockSize, 12 * blockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        blockSize, (short) 3, seed);<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      pReadFile(fileSys, p);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      cleanupFile(fileSys, p);<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      assertTrue(metrics.getHedgedReadOps() &gt; 0);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    } finally {<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      fileSys.close();<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      cluster.shutdown();<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    }<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  }<a name="line.414"></a>
+<span class="sourceLineNo">415</span><a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  @Test<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  public void testCopyFilesParallel() throws Exception {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    MiniDFSCluster cluster = htu.startMiniDFSCluster(1);<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    cluster.waitActive();<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    FileSystem fs = cluster.getFileSystem();<a name="line.421"></a>
+<span class="sourceLineNo">422</span>    Path src = new Path("/src");<a name="line.422"></a>
+<span class="sourceLineNo">423</span>    fs.mkdirs(src);<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    for (int i = 0; i &lt; 50; i++) {<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      WriteDataToHDFS(fs, new Path(src, String.valueOf(i)), 1024);<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    Path sub = new Path(src, "sub");<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    fs.mkdirs(sub);<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    for (int i = 0; i &lt; 50; i++) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      WriteDataToHDFS(fs, new Path(sub, String.valueOf(i)), 1024);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    }<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    Path dst = new Path("/dst");<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    List&lt;Path&gt; allFiles = FSUtils.copyFilesParallel(fs, src, fs, dst, conf, 4);<a name="line.433"></a>
+<span class="sourceLineNo">434</span><a name="line.434"></a>
+<span class="sourceLineNo">435</span>    assertEquals(102, allFiles.size());<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    FileStatus[] list = fs.listStatus(dst);<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    assertEquals(51, list.length);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    FileStatus[] sublist = fs.listStatus(new Path(dst, "sub"));<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    assertEquals(50, sublist.length);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  }<a name="line.440"></a>
+<span class="sourceLineNo">441</span><a name="line.441"></a>
+<span class="sourceLineNo">442</span>  // Below is taken from TestPread over in HDFS.<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  static final int blockSize = 4096;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  static final long seed = 0xDEADBEEFL;<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>  private void pReadFile(FileSystem fileSys, Path name) throws IOException {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>    FSDataInputStream stm = fileSys.open(name);<a name="line.447"></a>
+<span class="sourceLineNo">448</span>    byte[] expected = new byte[12 * blockSize];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    Random rand = new Random(seed);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>    rand.nextBytes(expected);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    // do a sanity check. Read first 4K bytes<a name="line.451"></a>
+<span class="sourceLineNo">452</span>    byte[] actual = new byte[4096];<a name="line.452"></a>
 <span class="sourceLineNo">453</span>    stm.readFully(actual);<a name="line.453"></a>
-<span class="sourceLineNo">454</span>    checkAndEraseData(actual, 8192, expected, "Pread Test 6");<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    // done<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    stm.close();<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    // check block location caching<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    stm = fileSys.open(name);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    stm.readFully(1, actual, 0, 4096);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>    stm.readFully(4*blockSize, actual, 0, 4096);<a name="line.460"></a>
-<span class="sourceLineNo">461</span>    stm.readFully(7*blockSize, actual, 0, 4096);<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    actual = new byte[3*4096];<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    stm.readFully(0*blockSize, actual, 0, 3*4096);<a name="line.463"></a>
-<span class="sourceLineNo">464</span>    checkAndEraseData(actual, 0, expected, "Pread Test 7");<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    actual = new byte[8*4096];<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    stm.readFully(3*blockSize, actual, 0, 8*4096);<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    checkAndEraseData(actual, 3*blockSize, expected, "Pread Test 8");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    // read the tail<a name="line.468"></a>
-<span class="sourceLineNo">469</span>    stm.readFully(11*blockSize+blockSize/2, actual, 0, blockSize/2);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    IOException res = null;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    try { // read beyond the end of the file<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      stm.readFully(11*blockSize+blockSize/2, actual, 0, blockSize);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    } catch (IOException e) {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      // should throw an exception<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      res = e;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    }<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    assertTrue("Error reading beyond file boundary.", res != null);<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>    stm.close();<a name="line.479"></a>
-<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private void checkAndEraseData(byte[] actual, int from, byte[] expected, String message) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    for (int idx = 0; idx &lt; actual.length; idx++) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      assertEquals(message+" byte "+(from+idx)+" differs. expected "+<a name="line.484"></a>
-<span class="sourceLineNo">485</span>                        expected[from+idx]+" actual "+actual[idx],<a name="line.485"></a>
-<span class="sourceLineNo">486</span>                        actual[idx], expected[from+idx]);<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      actual[idx] = 0;<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    }<a name="line.488"></a>
-<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
-<span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>  private void doPread(FSDataInputStream stm, long position, byte[] buffer,<a name="line.491"></a>
-<span class="sourceLineNo">49

<TRUNCATED>

[02/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/downloads.html
----------------------------------------------------------------------
diff --git a/downloads.html b/downloads.html
index 78f78da..b081651 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase Downloads</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -366,7 +366,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/export_control.html
----------------------------------------------------------------------
diff --git a/export_control.html b/export_control.html
index ecbf813..644ef34 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Export Control
@@ -331,7 +331,7 @@ for more details.</p>
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/index.html
----------------------------------------------------------------------
diff --git a/index.html b/index.html
index 030deac..4f2d555 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
    <title>Apache HBase &#x2013; Apache HBase™ Home</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -411,7 +411,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/integration.html
----------------------------------------------------------------------
diff --git a/integration.html b/integration.html
index 47c4766..c68ffe9 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; CI Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -291,7 +291,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/issue-tracking.html
----------------------------------------------------------------------
diff --git a/issue-tracking.html b/issue-tracking.html
index 4c79a9a..1d94ba4 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Issue Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -288,7 +288,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/license.html
----------------------------------------------------------------------
diff --git a/license.html b/license.html
index 4df15e8..5846f28 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Licenses</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -491,7 +491,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/mail-lists.html
----------------------------------------------------------------------
diff --git a/mail-lists.html b/mail-lists.html
index e61c1c9..ea6766f 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Mailing Lists</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -341,7 +341,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/metrics.html
----------------------------------------------------------------------
diff --git a/metrics.html b/metrics.html
index c57565c..72b04bd 100644
--- a/metrics.html
+++ b/metrics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) Metrics
@@ -459,7 +459,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/old_news.html
----------------------------------------------------------------------
diff --git a/old_news.html b/old_news.html
index c8cfa09..39160c1 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Old Apache HBase (TM) News
@@ -440,7 +440,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/plugin-management.html
----------------------------------------------------------------------
diff --git a/plugin-management.html b/plugin-management.html
index 9e32b42..5b13da6 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugin Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -440,7 +440,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/plugins.html
----------------------------------------------------------------------
diff --git a/plugins.html b/plugins.html
index e07e163..eddde21 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugins</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -375,7 +375,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/poweredbyhbase.html
----------------------------------------------------------------------
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index 11fa070..2d4b98c 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Powered By Apache HBase™</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -769,7 +769,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/project-info.html
----------------------------------------------------------------------
diff --git a/project-info.html b/project-info.html
index 2fc695d..5c9db2e 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -335,7 +335,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/project-reports.html
----------------------------------------------------------------------
diff --git a/project-reports.html b/project-reports.html
index 666bdc8..e67a937 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Generated Reports</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -305,7 +305,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/project-summary.html
----------------------------------------------------------------------
diff --git a/project-summary.html b/project-summary.html
index c3c3374..f2fe451 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Summary</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -331,7 +331,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/pseudo-distributed.html
----------------------------------------------------------------------
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index 039a438..7abdaf4 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
 Running Apache HBase (TM) in pseudo-distributed mode
@@ -308,7 +308,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/replication.html
----------------------------------------------------------------------
diff --git a/replication.html b/replication.html
index 1b9e9a6..882114b 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Apache HBase (TM) Replication
@@ -303,7 +303,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/resources.html
----------------------------------------------------------------------
diff --git a/resources.html b/resources.html
index b034ccf..47294ac 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Other Apache HBase (TM) Resources</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -331,7 +331,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/source-repository.html
----------------------------------------------------------------------
diff --git a/source-repository.html b/source-repository.html
index 43f1528..3cbc870 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Source Code Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -299,7 +299,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/sponsors.html
----------------------------------------------------------------------
diff --git a/sponsors.html b/sponsors.html
index a7e6a23..39db3e5 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase™ Sponsors</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -333,7 +333,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/supportingprojects.html
----------------------------------------------------------------------
diff --git a/supportingprojects.html b/supportingprojects.html
index 368abdc..ca69b60 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Supporting Projects</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -520,7 +520,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/team-list.html
----------------------------------------------------------------------
diff --git a/team-list.html b/team-list.html
index 7977b46..850db45 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Team</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -730,7 +730,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 14fd2c9..4a7e8b9 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -46888,6 +46888,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/TestByteBuffUtils.html#testCopyAndCompare--">testCopyAndCompare()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/TestByteBuffUtils.html" title="class in org.apache.hadoop.hbase.util">TestByteBuffUtils</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/TestFSUtils.html#testCopyFilesParallel--">testCopyFilesParallel()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/TestFSUtils.html" title="class in org.apache.hadoop.hbase.util">TestFSUtils</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/TestByteBufferUtils.html#testCopyFromArrayToBuffer--">testCopyFromArrayToBuffer()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/TestByteBufferUtils.html" title="class in org.apache.hadoop.hbase.util">TestByteBufferUtils</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/TestByteBufferUtils.html#testCopyFromBuffer--">testCopyFromBuffer()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/TestByteBufferUtils.html" title="class in org.apache.hadoop.hbase.util">TestByteBufferUtils</a></dt>
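
The two index lines added above record a new TestFSUtils#testCopyFilesParallel() test. These diffs only carry the regenerated Javadoc, so the signature and implementation of the parallel-copy helper in FSUtils that it exercises are not visible here. As a rough, self-contained sketch of the idea presumably under test, copying every file beneath a source directory onto a pool of worker threads, using only stock Hadoop FileSystem APIs and made-up names (ParallelCopySketch and copyDirParallel are mine, not HBase code):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class ParallelCopySketch {

  // Copy everything under 'src' to 'dst', one pool task per regular file.
  static void copyDirParallel(FileSystem srcFs, Path src, FileSystem dstFs, Path dst,
      Configuration conf, int threads) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(threads);
    List<Future<Void>> futures = new ArrayList<>();
    try {
      submit(srcFs, src, dstFs, dst, conf, pool, futures);
      for (Future<Void> f : futures) {
        f.get(); // surface any copy failure from the worker threads
      }
    } finally {
      pool.shutdown();
    }
  }

  private static void submit(FileSystem srcFs, Path src, FileSystem dstFs, Path dst,
      Configuration conf, ExecutorService pool, List<Future<Void>> futures) throws IOException {
    if (srcFs.getFileStatus(src).isDirectory()) {
      // Mirror the directory, then recurse into its children.
      dstFs.mkdirs(dst);
      for (FileStatus child : srcFs.listStatus(src)) {
        submit(srcFs, child.getPath(), dstFs, new Path(dst, child.getPath().getName()),
            conf, pool, futures);
      }
    } else {
      // Each regular file becomes one copy task on the pool.
      Callable<Void> task = () -> {
        FileUtil.copy(srcFs, src, dstFs, dst, false, conf);
        return null;
      };
      futures.add(pool.submit(task));
    }
  }
}

A test along these lines would presumably create a handful of small files under a temporary source directory, run the copy with a few threads, and assert that the destination tree has the same names and contents as the source.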

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index ec56445..4837d80 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -158,8 +158,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TestCacheOnWrite.CacheOnWriteType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TagUsage.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TagUsage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TestCacheOnWrite.CacheOnWriteType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
index 36047d6..eb1ab1d 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -570,15 +570,15 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ResourceChecker.Phase.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ResourceChecker.Phase</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HBaseClusterManager.CommandProvider.Operation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.ACTION.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestDDLMasterFailover.ACTION</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HBaseClusterManager.CommandProvider.Operation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.RoleCommand.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.RoleCommand</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterManager.ServiceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ScanPerformanceEvaluation.ScanCounter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ScanPerformanceEvaluation.ScanCounter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/PerformanceEvaluation.Counter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">PerformanceEvaluation.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.Stat.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestRegionReplicaPerf.Stat</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterManager.ServiceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.Service</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ResourceChecker.Phase.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ResourceChecker.Phase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/PerformanceEvaluation.Counter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">PerformanceEvaluation.Counter</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ScanPerformanceEvaluation.ScanCounter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ScanPerformanceEvaluation.ScanCounter</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 40e3320..e819ba4 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -659,10 +659,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.ActionType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestMultiLogThreshold.ActionType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestCacheOnWriteInSchema.CacheOnWriteType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.Metric.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestRegionServerReadRequestMetrics.Metric</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.Manipulation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DataBlockEncodingTool.Manipulation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.Metric.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestRegionServerReadRequestMetrics.Metric</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestCacheOnWriteInSchema.CacheOnWriteType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.ActionType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestMultiLogThreshold.ActionType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestAtomicOperation.TestStep</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
index 7c9c799..1d0bd63 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
@@ -253,9 +253,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Verify.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Verify.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Generator.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Generator.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.Counters.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestLoadAndVerify.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Verify.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Verify.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.Counters.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestWithCellVisibilityLoadAndVerify.Counters</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.html b/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.html
index 40b230e..bf7ba6b 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.60">TestFSUtils</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.61">TestFSUtils</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Test <code>FSUtils</code>.</div>
 </li>
@@ -233,51 +233,55 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i7" class="rowColor">
 <td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testDeleteAndExists--">testDeleteAndExists</a></span>()</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testCopyFilesParallel--">testCopyFilesParallel</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i8" class="altColor">
 <td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testDeleteAndExists--">testDeleteAndExists</a></span>()</code>&nbsp;</td>
+</tr>
+<tr id="i9" class="rowColor">
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testDFSHedgedReadMetrics--">testDFSHedgedReadMetrics</a></span>()</code>
 <div class="block">Ugly test that ensures we can get at the hedged read counters in dfsclient.</div>
 </td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testFilteredStatusDoesNotThrowOnNotFound--">testFilteredStatusDoesNotThrowOnNotFound</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testIsHDFS--">testIsHDFS</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testPermMask--">testPermMask</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testRenameAndSetModifyTime--">testRenameAndSetModifyTime</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testSetStoragePolicyDefault--">testSetStoragePolicyDefault</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testSetStoragePolicyInvalid--">testSetStoragePolicyInvalid</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testSetStoragePolicyValidButMaybeNotPresent--">testSetStoragePolicyValidButMaybeNotPresent</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#testVersion--">testVersion</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#verifyFileInDirWithStoragePolicy-java.lang.String-">verifyFileInDirWithStoragePolicy</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;policy)</code>&nbsp;</td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i19" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/TestFSUtils.html#WriteDataToHDFS-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-int-">WriteDataToHDFS</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                org.apache.hadoop.fs.Path&nbsp;file,
@@ -311,7 +315,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.63">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.64">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -320,7 +324,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.66">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.67">LOG</a></pre>
 </li>
 </ul>
 <a name="htu">
@@ -329,7 +333,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>htu</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.68">htu</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.69">htu</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -338,7 +342,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.69">fs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.70">fs</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -347,7 +351,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.70">conf</a></pre>
+<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.71">conf</a></pre>
 </li>
 </ul>
 <a name="blockSize">
@@ -356,7 +360,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>blockSize</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.416">blockSize</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.443">blockSize</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.TestFSUtils.blockSize">Constant Field Values</a></dd>
@@ -369,7 +373,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>seed</h4>
-<pre>static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.417">seed</a></pre>
+<pre>static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.444">seed</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.TestFSUtils.seed">Constant Field Values</a></dd>
@@ -382,7 +386,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>STREAM_CAPABILITIES_IS_PRESENT</h4>
-<pre>private static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.529">STREAM_CAPABILITIES_IS_PRESENT</a></pre>
+<pre>private static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.556">STREAM_CAPABILITIES_IS_PRESENT</a></pre>
 </li>
 </ul>
 </li>
@@ -399,7 +403,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestFSUtils</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.60">TestFSUtils</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.61">TestFSUtils</a>()</pre>
 </li>
 </ul>
 </li>
@@ -416,7 +420,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.73">setUp</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.74">setUp</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -430,7 +434,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testIsHDFS</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.79">testIsHDFS</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.80">testIsHDFS</a>()
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -444,7 +448,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>WriteDataToHDFS</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.90">WriteDataToHDFS</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.91">WriteDataToHDFS</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                              org.apache.hadoop.fs.Path&nbsp;file,
                              int&nbsp;dataSize)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
@@ -460,7 +464,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testcomputeHDFSBlocksDistribution</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.98">testcomputeHDFSBlocksDistribution</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.99">testcomputeHDFSBlocksDistribution</a>()
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -474,7 +478,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testVersion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.202">testVersion</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.203">testVersion</a>()
                  throws org.apache.hadoop.hbase.exceptions.DeserializationException,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -490,7 +494,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testPermMask</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.225">testPermMask</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.226">testPermMask</a>()
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -504,7 +508,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testDeleteAndExists</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.264">testDeleteAndExists</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.265">testDeleteAndExists</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -518,7 +522,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testFilteredStatusDoesNotThrowOnNotFound</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.293">testFilteredStatusDoesNotThrowOnNotFound</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.294">testFilteredStatusDoesNotThrowOnNotFound</a>()
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -532,7 +536,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testRenameAndSetModifyTime</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.304">testRenameAndSetModifyTime</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.305">testRenameAndSetModifyTime</a>()
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -546,7 +550,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testSetStoragePolicyDefault</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.340">testSetStoragePolicyDefault</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.341">testSetStoragePolicyDefault</a>()
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -560,7 +564,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testSetStoragePolicyValidButMaybeNotPresent</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.346">testSetStoragePolicyValidButMaybeNotPresent</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.347">testSetStoragePolicyValidButMaybeNotPresent</a>()
                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -574,7 +578,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testSetStoragePolicyInvalid</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.352">testSetStoragePolicyInvalid</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.353">testSetStoragePolicyInvalid</a>()
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -588,7 +592,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyFileInDirWithStoragePolicy</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.357">verifyFileInDirWithStoragePolicy</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;policy)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.358">verifyFileInDirWithStoragePolicy</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;policy)
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -602,7 +606,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testDFSHedgedReadMetrics</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.386">testDFSHedgedReadMetrics</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.387">testDFSHedgedReadMetrics</a>()
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Ugly test that ensures we can get at the hedged read counters in dfsclient.
  Does a bit of preading with hedged reads enabled using code taken from hdfs TestPread.</div>
@@ -612,13 +616,27 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
+<a name="testCopyFilesParallel--">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>testCopyFilesParallel</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.418">testCopyFilesParallel</a>()
+                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></code></dd>
+</dl>
+</li>
+</ul>
 <a name="pReadFile-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>pReadFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.419">pReadFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fileSys,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.446">pReadFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fileSys,
                        org.apache.hadoop.fs.Path&nbsp;name)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -633,7 +651,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkAndEraseData</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.482">checkAndEraseData</a>(byte[]&nbsp;actual,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.509">checkAndEraseData</a>(byte[]&nbsp;actual,
                                int&nbsp;from,
                                byte[]&nbsp;expected,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
@@ -645,7 +663,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>doPread</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.491">doPread</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;stm,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.518">doPread</a>(org.apache.hadoop.fs.FSDataInputStream&nbsp;stm,
                      long&nbsp;position,
                      byte[]&nbsp;buffer,
                      int&nbsp;offset,
@@ -663,7 +681,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cleanupFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.522">cleanupFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fileSys,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.549">cleanupFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fileSys,
                          org.apache.hadoop.fs.Path&nbsp;name)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -678,7 +696,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>checkStreamCapabilitiesOnHdfsDataOutputStream</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.545">checkStreamCapabilitiesOnHdfsDataOutputStream</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFSUtils.html#line.572">checkStreamCapabilitiesOnHdfsDataOutputStream</a>()
                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
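
For context on the testDFSHedgedReadMetrics entry above (the test that "does a bit of preading with hedged reads enabled"): hedged reads are switched on through the standard HDFS client configuration keys, and the counters live in a DFSHedgedReadMetrics object on the client side. A minimal sketch of that flow follows; the two config key strings are the stock HDFS ones, while reaching the metrics through getClient().getHedgedReadMetrics() is my assumption about the plumbing, not necessarily how the HBase test gets at them.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class HedgedReadMetricsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Standard HDFS client keys for hedged reads; a threshold of 0 hedges every pread.
    conf.setInt("dfs.client.hedged.read.threadpool.size", 5);
    conf.setLong("dfs.client.hedged.read.threshold.millis", 0);

    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
    try {
      DistributedFileSystem fs = cluster.getFileSystem();
      Path file = new Path("/hedged-read-sketch");

      // Write a small file so there is something to pread back.
      byte[] data = new byte[64 * 1024];
      try (FSDataOutputStream out = fs.create(file)) {
        out.write(data);
      }

      byte[] buf = new byte[4096];
      try (FSDataInputStream in = fs.open(file)) {
        in.read(0L, buf, 0, buf.length); // positional read, eligible for hedging
      }

      // Assumed accessor path to the client-side hedged-read counters.
      DFSHedgedReadMetrics metrics = fs.getClient().getHedgedReadMetrics();
      System.out.println("hedged read ops: " + metrics.getHedgedReadOps());
    } finally {
      cluster.shutdown();
    }
  }
}

With the threshold at zero, every positional read should spawn a hedged request, so the ops counter ought to come back non-zero on a healthy mini cluster.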

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 434a274..84112c6 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -139,9 +139,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/FaultyFSLog.FailureType.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">FaultyFSLog.FailureType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/IOTestProvider.AllowedOperations.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">IOTestProvider.AllowedOperations</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/TestWALSplit.Corruptions.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">TestWALSplit.Corruptions</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/IOTestProvider.AllowedOperations.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">IOTestProvider.AllowedOperations</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/FaultyFSLog.FailureType.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">FaultyFSLog.FailureType</span></a></li>
 </ul>
 </li>
 </ul>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
index 90d7295..c9f6fa2 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.744">ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.752">ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader</a>
 extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;</pre>
 </li>
 </ul>
@@ -243,7 +243,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>files</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.746">files</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.754">files</a></pre>
 </li>
 </ul>
 <a name="totalSize">
@@ -252,7 +252,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>totalSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.747">totalSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.755">totalSize</a></pre>
 </li>
 </ul>
 <a name="procSize">
@@ -261,7 +261,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>procSize</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.748">procSize</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.756">procSize</a></pre>
 </li>
 </ul>
 <a name="index">
@@ -270,7 +270,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockListLast">
 <li class="blockList">
 <h4>index</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.749">index</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.757">index</a></pre>
 </li>
 </ul>
 </li>
@@ -287,7 +287,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ExportSnapshotRecordReader</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.751">ExportSnapshotRecordReader</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;files)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.759">ExportSnapshotRecordReader</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;files)</pre>
 </li>
 </ul>
 </li>
@@ -304,7 +304,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.759">close</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.767">close</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true#close--" title="class or interface in java.io">close</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></code></dd>
@@ -321,7 +321,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>getCurrentKey</h4>
-<pre>public&nbsp;org.apache.hadoop.io.BytesWritable&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.762">getCurrentKey</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.io.BytesWritable&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.770">getCurrentKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>getCurrentKey</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;</code></dd>
@@ -334,7 +334,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>getCurrentValue</h4>
-<pre>public&nbsp;org.apache.hadoop.io.NullWritable&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.765">getCurrentValue</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.io.NullWritable&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.773">getCurrentValue</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>getCurrentValue</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;</code></dd>
@@ -347,7 +347,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>getProgress</h4>
-<pre>public&nbsp;float&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.768">getProgress</a>()</pre>
+<pre>public&nbsp;float&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.776">getProgress</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>getProgress</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;</code></dd>
@@ -360,7 +360,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockList">
 <li class="blockList">
 <h4>initialize</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.771">initialize</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.779">initialize</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
                        org.apache.hadoop.mapreduce.TaskAttemptContext&nbsp;tac)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -374,7 +374,7 @@ extends org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWr
 <ul class="blockListLast">
 <li class="blockList">
 <h4>nextKeyValue</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.774">nextKeyValue</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html#line.782">nextKeyValue</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>nextKeyValue</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;</code></dd>
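
The ExportSnapshotRecordReader diff above only reflects a line-number shift from code added earlier in ExportSnapshot; the class itself is a RecordReader that walks a pre-built list of (file, size) pairs and, judging by the procSize/totalSize fields, reports progress as bytes handed out so far over total bytes. A stripped-down reader with the same shape, written against the stock mapreduce API with invented names (ListRecordReaderSketch, weights), could look like this:

import java.util.List;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ListRecordReaderSketch extends RecordReader<BytesWritable, NullWritable> {
  private final List<BytesWritable> keys; // pre-computed keys for this split
  private final long[] weights;           // per-key "size", used only for progress
  private long totalSize;
  private long procSize;
  private int index = -1;

  public ListRecordReaderSketch(List<BytesWritable> keys, long[] weights) {
    this.keys = keys;
    this.weights = weights;
    for (long w : weights) {
      this.totalSize += w;
    }
  }

  @Override
  public void initialize(InputSplit split, TaskAttemptContext tac) {
    // Everything was handed to the constructor, so there is nothing to do here.
  }

  @Override
  public boolean nextKeyValue() {
    if (index + 1 >= keys.size()) {
      return false;
    }
    index++;
    procSize += weights[index]; // advance the progress counter by this record's weight
    return true;
  }

  @Override
  public BytesWritable getCurrentKey() {
    return keys.get(index);
  }

  @Override
  public NullWritable getCurrentValue() {
    return NullWritable.get();
  }

  @Override
  public float getProgress() {
    return totalSize == 0 ? 1.0f : (float) procSize / totalSize;
  }

  @Override
  public void close() {
    // Nothing to release: the reader only walks an in-memory list.
  }
}

Keeping the whole split in memory is what makes initialize() and close() trivial here; the real reader presumably follows the same pattern because the per-split file list is computed up front by the enclosing InputFormat.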

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
index ac364bb..1b5f317 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.659">ExportSnapshot.ExportSnapshotInputFormat</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.667">ExportSnapshot.ExportSnapshotInputFormat</a>
 extends org.apache.hadoop.mapreduce.InputFormat&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;</pre>
 </li>
 </ul>
@@ -217,7 +217,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;org.apache.hadoop.io.BytesWri
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ExportSnapshotInputFormat</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.659">ExportSnapshotInputFormat</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.667">ExportSnapshotInputFormat</a>()</pre>
 </li>
 </ul>
 </li>
@@ -234,7 +234,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;org.apache.hadoop.io.BytesWri
 <ul class="blockList">
 <li class="blockList">
 <h4>createRecordReader</h4>
-<pre>public&nbsp;org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.661">createRecordReader</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
+<pre>public&nbsp;org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.669">createRecordReader</a>(org.apache.hadoop.mapreduce.InputSplit&nbsp;split,
                                                                                                                                          org.apache.hadoop.mapreduce.TaskAttemptContext&nbsp;tac)
                                                                                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                                                                                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -253,7 +253,7 @@ extends org.apache.hadoop.mapreduce.InputFormat&lt;org.apache.hadoop.io.BytesWri
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getSplits</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.667">getSplits</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.mapreduce.InputSplit&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.675">getSplits</a>(org.apache.hadoop.mapreduce.JobContext&nbsp;context)
                                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
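
Similarly, the ExportSnapshotInputFormat hunks above only move line anchors for createRecordReader and getSplits. A minimal InputFormat<BytesWritable,NullWritable> of the same shape, with getSplits driven by a configured split count and createRecordReader returning the ByteListRecordReader sketched earlier, could look like the following. The class name, the "example.num.splits" key and the empty split are illustrative assumptions, not HBase code.

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.mapreduce.InputFormat;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.RecordReader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    /** Illustrative input format: one empty split per configured mapper, key-only records. */
    public class FixedSplitInputFormat extends InputFormat<BytesWritable, NullWritable> {
      // Hypothetical configuration key naming how many splits (mappers) to create.
      static final String NUM_SPLITS_KEY = "example.num.splits";

      @Override
      public List<InputSplit> getSplits(JobContext context) {
        int numSplits = context.getConfiguration().getInt(NUM_SPLITS_KEY, 1);
        List<InputSplit> splits = new ArrayList<>(numSplits);
        for (int i = 0; i < numSplits; i++) {
          splits.add(new EmptySplit());
        }
        return splits;
      }

      @Override
      public RecordReader<BytesWritable, NullWritable> createRecordReader(InputSplit split,
          TaskAttemptContext tac) {
        // Each split gets its own reader; reuse the reader sketched above, empty for brevity.
        return new ByteListRecordReader(Collections.<byte[]>emptyList());
      }

      /** A split that carries no data of its own. */
      public static class EmptySplit extends InputSplit implements Writable {
        @Override public long getLength() { return 0; }
        @Override public String[] getLocations() { return new String[0]; }
        @Override public void write(DataOutput out) { }
        @Override public void readFields(DataInput in) { }
      }
    }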

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
index 3ab7f5d..0dd6700 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
@@ -107,7 +107,7 @@
 </dl>
 <hr>
 <br>
-<pre>static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.121">ExportSnapshot.Options</a>
+<pre>static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.129">ExportSnapshot.Options</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -228,7 +228,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SNAPSHOT</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.122">SNAPSHOT</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.130">SNAPSHOT</a></pre>
 </li>
 </ul>
 <a name="TARGET_NAME">
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TARGET_NAME</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.123">TARGET_NAME</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.131">TARGET_NAME</a></pre>
 </li>
 </ul>
 <a name="COPY_TO">
@@ -246,7 +246,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>COPY_TO</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.125">COPY_TO</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.133">COPY_TO</a></pre>
 </li>
 </ul>
 <a name="COPY_FROM">
@@ -255,7 +255,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>COPY_FROM</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.127">COPY_FROM</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.135">COPY_FROM</a></pre>
 </li>
 </ul>
 <a name="NO_CHECKSUM_VERIFY">
@@ -264,7 +264,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_CHECKSUM_VERIFY</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.129">NO_CHECKSUM_VERIFY</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.137">NO_CHECKSUM_VERIFY</a></pre>
 </li>
 </ul>
 <a name="NO_TARGET_VERIFY">
@@ -273,7 +273,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_TARGET_VERIFY</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.131">NO_TARGET_VERIFY</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.139">NO_TARGET_VERIFY</a></pre>
 </li>
 </ul>
 <a name="OVERWRITE">
@@ -282,7 +282,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>OVERWRITE</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.133">OVERWRITE</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.141">OVERWRITE</a></pre>
 </li>
 </ul>
 <a name="CHUSER">
@@ -291,7 +291,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CHUSER</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.135">CHUSER</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.143">CHUSER</a></pre>
 </li>
 </ul>
 <a name="CHGROUP">
@@ -300,7 +300,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CHGROUP</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.137">CHGROUP</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.145">CHGROUP</a></pre>
 </li>
 </ul>
 <a name="CHMOD">
@@ -309,7 +309,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CHMOD</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.139">CHMOD</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.147">CHMOD</a></pre>
 </li>
 </ul>
 <a name="MAPPERS">
@@ -318,7 +318,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>MAPPERS</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.141">MAPPERS</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.149">MAPPERS</a></pre>
 </li>
 </ul>
 <a name="BANDWIDTH">
@@ -327,7 +327,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BANDWIDTH</h4>
-<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.143">BANDWIDTH</a></pre>
+<pre>static final&nbsp;org.apache.hbase.thirdparty.org.apache.commons.cli.Option <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.151">BANDWIDTH</a></pre>
 </li>
 </ul>
 </li>
@@ -344,7 +344,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>Options</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.121">Options</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html#line.129">Options</a>()</pre>
 </li>
 </ul>
 </li>
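
The ExportSnapshot.Options diff above again only shifts line anchors for the command-line option constants (SNAPSHOT, TARGET_NAME, COPY_TO, COPY_FROM, NO_CHECKSUM_VERIFY, NO_TARGET_VERIFY, OVERWRITE, CHUSER, CHGROUP, CHMOD, MAPPERS, BANDWIDTH). Because ExportSnapshot implements org.apache.hadoop.util.Tool, those options can also be passed programmatically through ToolRunner. The invocation below is an illustrative sketch: the snapshot name, destination URI and numbers are placeholders, and the flag spellings should be checked against the Options constants for the HBase version in use.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
    import org.apache.hadoop.util.ToolRunner;

    public class ExportSnapshotDriver {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Export a snapshot to another cluster's HBase root directory.
        int exitCode = ToolRunner.run(conf, new ExportSnapshot(), new String[] {
            "-snapshot", "my_snapshot",                // snapshot to export (placeholder name)
            "-copy-to", "hdfs://backup-cluster/hbase", // destination root (placeholder URI)
            "-mappers", "16",                          // number of copy mappers
            "-bandwidth", "200"                        // per-mapper bandwidth cap in MB/s
        });
        System.exit(exitCode);
      }
    }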

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
index d67a240..9fa2e33 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
@@ -107,7 +107,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.113">ExportSnapshot.Testing</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.121">ExportSnapshot.Testing</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -196,7 +196,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_TEST_FAILURE</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.114">CONF_TEST_FAILURE</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.122">CONF_TEST_FAILURE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.Testing.CONF_TEST_FAILURE">Constant Field Values</a></dd>
@@ -209,7 +209,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_TEST_FAILURE_COUNT</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.115">CONF_TEST_FAILURE_COUNT</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.123">CONF_TEST_FAILURE_COUNT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.Testing.CONF_TEST_FAILURE_COUNT">Constant Field Values</a></dd>
@@ -222,7 +222,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>failuresCountToInject</h4>
-<pre>int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.116">failuresCountToInject</a></pre>
+<pre>int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.124">failuresCountToInject</a></pre>
 </li>
 </ul>
 <a name="injectedFailureCount">
@@ -231,7 +231,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>injectedFailureCount</h4>
-<pre>int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.117">injectedFailureCount</a></pre>
+<pre>int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.125">injectedFailureCount</a></pre>
 </li>
 </ul>
 </li>
@@ -248,7 +248,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>Testing</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.113">Testing</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html#line.121">Testing</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
index 00efef3..1f0eaae 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":9,"i3":9,"i4":9,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10};
+var methods = {"i0":10,"i1":10,"i2":9,"i3":9,"i4":9,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.88">ExportSnapshot</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.92">ExportSnapshot</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractHBaseTool.html" title="class in org.apache.hadoop.hbase.util">AbstractHBaseTool</a>
 implements org.apache.hadoop.util.Tool</pre>
 <div class="block">Export the specified snapshot to a given FileSystem.
@@ -201,61 +201,69 @@ implements org.apache.hadoop.util.Tool</pre>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_CHECKSUM_VERIFY">CONF_CHECKSUM_VERIFY</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_COPY_MANIFEST_THREADS">CONF_COPY_MANIFEST_THREADS</a></span></code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_DEST_PREFIX">CONF_DEST_PREFIX</a></span></code>
 <div class="block">Configuration prefix for overrides for the destination filesystem</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_FILES_GROUP">CONF_FILES_GROUP</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_FILES_MODE">CONF_FILES_MODE</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_FILES_USER">CONF_FILES_USER</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_INPUT_ROOT">CONF_INPUT_ROOT</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_MAP_GROUP">CONF_MAP_GROUP</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_MR_JOB_NAME">CONF_MR_JOB_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_NUM_SPLITS">CONF_NUM_SPLITS</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_OUTPUT_ROOT">CONF_OUTPUT_ROOT</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SKIP_TMP">CONF_SKIP_TMP</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SNAPSHOT_DIR">CONF_SNAPSHOT_DIR</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SNAPSHOT_NAME">CONF_SNAPSHOT_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SOURCE_PREFIX">CONF_SOURCE_PREFIX</a></span></code>
 <div class="block">Configuration prefix for overrides for the source filesystem</div>
 </td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static int</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#DEFAULT_COPY_MANIFEST_THREADS">DEFAULT_COPY_MANIFEST_THREADS</a></span></code>&nbsp;</td>
+</tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#filesGroup">filesGroup</a></span></code>&nbsp;</td>
@@ -410,25 +418,28 @@ implements org.apache.hadoop.util.Tool</pre>
 </tr>
 <tr id="i8" class="altColor">
 <td class="colFirst"><code>private void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setOwner-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.lang.String-java.lang.String-boolean-">setOwner</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
-        org.apache.hadoop.fs.Path&nbsp;path,
-        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;user,
-        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;group,
-        boolean&nbsp;recursive)</code>
-<div class="block">Set path ownership.</div>
-</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setConfigParallel-org.apache.hadoop.fs.FileSystem-java.util.List-java.util.function.BiConsumer-org.apache.hadoop.conf.Configuration-">setConfigParallel</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;outputFs,
+                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;traversedPath,
+                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/BiConsumer.html?is-external=true" title="class or interface in java.util.function">BiConsumer</a>&lt;org.apache.hadoop.fs.FileSystem,org.apache.hadoop.fs.Path&gt;&nbsp;task,
+                 org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
 <tr id="i9" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setPermission-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-short-boolean-">setPermission</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
-             org.apache.hadoop.fs.Path&nbsp;path,
-             short&nbsp;filesMode,
-             boolean&nbsp;recursive)</code>
-<div class="block">Set path permission.</div>
-</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setOwnerParallel-org.apache.hadoop.fs.FileSystem-java.lang.String-java.lang.String-org.apache.hadoop.conf.Configuration-java.util.List-">setOwnerParallel</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;outputFs,
+                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filesUser,
+                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filesGroup,
+                org.apache.hadoop.conf.Configuration&nbsp;conf,
+                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;traversedPath)</code>&nbsp;</td>
 </tr>
 <tr id="i10" class="altColor">
 <td class="colFirst"><code>private void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setPermissionParallel-org.apache.hadoop.fs.FileSystem-short-java.util.List-org.apache.hadoop.conf.Configuration-">setPermissionParallel</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;outputFs,
+                     short&nbsp;filesMode,
+                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;traversedPath,
+                     org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
+</tr>
+<tr id="i11" class="rowColor">
+<td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#verifySnapshot-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">verifySnapshot</a></span>(org.apache.hadoop.conf.Configuration&nbsp;baseConf,
               org.apache.hadoop.fs.FileSystem&nbsp;fs,
               org.apache.hadoop.fs.Path&nbsp;rootDir,
@@ -483,7 +494,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>NAME</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.89">NAME</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.93">NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.NAME">Constant Field Values</a></dd>
@@ -496,7 +507,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_SOURCE_PREFIX</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.91">CONF_SOURCE_PREFIX</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.95">CONF_SOURCE_PREFIX</a></pre>
 <div class="block">Configuration prefix for overrides for the source filesystem</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -510,7 +521,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_DEST_PREFIX</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.93">CONF_DEST_PREFIX</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.97">CONF_DEST_PREFIX</a></pre>
 <div class="block">Configuration prefix for overrides for the destination filesystem</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -524,7 +535,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.95">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.99">LOG</a></pre>
 </li>
 </ul>
 <a name="MR_NUM_MAPS">
@@ -533,7 +544,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>MR_NUM_MAPS</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.97">MR_NUM_MAPS</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.101">MR_NUM_MAPS</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.MR_NUM_MAPS">Constant Field Values</a></dd>
@@ -546,7 +557,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_NUM_SPLITS</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.98">CONF_NUM_SPLITS</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.102">CONF_NUM_SPLITS</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_NUM_SPLITS">Constant Field Values</a></dd>
@@ -559,7 +570,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_SNAPSHOT_NAME</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.99">CONF_SNAPSHOT_NAME</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.103">CONF_SNAPSHOT_NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SNAPSHOT_NAME">Constant Field Values</a></dd>
@@ -572,7 +583,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_SNAPSHOT_DIR</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.100">CONF_SNAPSHOT_DIR</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.104">CONF_SNAPSHOT_DIR</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SNAPSHOT_DIR">Constant Field Values</a></dd>
@@ -585,7 +596,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_FILES_USER</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.101">CONF_FILES_USER</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.105">CONF_FILES_USER</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_FILES_USER">Constant Field Values</a></dd>
@@ -598,7 +609,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_FILES_GROUP</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.102">CONF_FILES_GROUP</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.106">CONF_FILES_GROUP</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_FILES_GROUP">Constant Field Values</a></dd>
@@ -611,7 +622,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_FILES_MODE</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.103">CONF_FILES_MODE</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.107">CONF_FILES_MODE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_FILES_MODE">Constant Field Values</a></dd>
@@ -624,7 +635,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_CHECKSUM_VERIFY</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.104">CONF_CHECKSUM_VERIFY</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.108">CONF_CHECKSUM_VERIFY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_CHECKSUM_VERIFY">Constant Field Values</a></dd>
@@ -637,7 +648,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_OUTPUT_ROOT</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.105">CONF_OUTPUT_ROOT</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.109">CONF_OUTPUT_ROOT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_OUTPUT_ROOT">Constant Field Values</a></dd>
@@ -650,7 +661,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_INPUT_ROOT</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.106">CONF_INPUT_ROOT</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.110">CONF_INPUT_ROOT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_INPUT_ROOT">Constant Field Values</a></dd>
@@ -663,7 +674,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_BUFFER_SIZE</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.107">CONF_BUFFER_SIZE</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.111">CONF_BUFFER_SIZE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_BUFFER_SIZE">Constant Field Values</a></dd>
@@ -676,7 +687,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_MAP_GROUP</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.108">CONF_MAP_GROUP</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.112">CONF_MAP_GROUP</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_MAP_GROUP">Constant Field Values</a></dd>
@@ -689,7 +700,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_BANDWIDTH_MB</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.109">CONF_BANDWIDTH_MB</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.113">CONF_BANDWIDTH_MB</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_BANDWIDTH_MB">Constant Field Values</a></dd>
@@ -702,7 +713,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_MR_JOB_NAME</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.110">CONF_MR_JOB_NAME</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.114">CONF_MR_JOB_NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_MR_JOB_NAME">Constant Field Values</a></dd>
@@ -715,20 +726,42 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_SKIP_TMP</h4>
-<pre>protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.111">CONF_SKIP_TMP</a></pre>
+<pre>protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.115">CONF_SKIP_TMP</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SKIP_TMP">Constant Field Values</a></dd>
 </dl>
 </li>
 </ul>
+<a name="CONF_COPY_MANIFEST_THREADS">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>CONF_COPY_MANIFEST_THREADS</h4>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.116">CONF_COPY_MANIFEST_THREADS</a></pre>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_COPY_MANIFEST_THREADS">Constant Field Values</a></dd>
+</dl>
+</li>
+</ul>
+<a name="DEFAULT_COPY_MANIFEST_THREADS">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>DEFAULT_COPY_MANIFEST_THREADS</h4>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.118">DEFAULT_COPY_MANIFEST_THREADS</a></pre>
+</li>
+</ul>
 <a name="verifyTarget">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyTarget</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.876">verifyTarget</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.901">verifyTarget</a></pre>
 </li>
 </ul>
 <a name="verifyChecksum">
@@ -737,7 +770,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyChecksum</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.877">verifyChecksum</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.902">verifyChecksum</a></pre>
 </li>
 </ul>
 <a name="snapshotName">
@@ -746,7 +779,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshotName</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.878">snapshotName</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.903">snapshotName</a></pre>
 </li>
 </ul>
 <a name="targetName">
@@ -755,7 +788,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>targetName</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.879">targetName</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.904">targetName</a></pre>
 </li>
 </ul>
 <a name="overwrite">
@@ -764,7 +797,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>overwrite</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.880">overwrite</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.905">overwrite</a></pre>
 </li>
 </ul>
 <a name="filesGroup">
@@ -773,7 +806,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>filesGroup</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.881">filesGroup</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.906">filesGroup</a></pre>
 </li>
 </ul>
 <a name="filesUser">
@@ -782,7 +815,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>filesUser</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.882">filesUser</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.907">filesUser</a></pre>
 </li>
 </ul>
 <a name="outputRoot">
@@ -791,7 +824,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>outputRoot</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.883">outputRoot</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.908">outputRoot</a></pre>
 </li>
 </ul>
 <a name="inputRoot">
@@ -800,7 +833,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>inputRoot</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.884">inputRoot</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.909">inputRoot</a></pre>
 </li>
 </ul>
 <a name="bandwidthMB">
@@ -809,7 +842,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>bandwidthMB</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.885">bandwidthMB</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.910">bandwidthMB</a></pre>
 </li>
 </ul>
 <a name="filesMode">
@@ -818,7 +851,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>filesMode</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.886">filesMode</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.911">filesMode</a></pre>
 </li>
 </ul>
 <a name="mappers">
@@ -827,7 +860,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>mappers</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.887">mappers</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.912">mappers</a></pre>
 </li>
 </ul>
 </li>
@@ -844,7 +877,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ExportSnapshot</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.88">ExportSnapshot</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.92">ExportSnapshot</a>()</pre>
 </li>
 </ul>
 </li>
@@ -861,7 +894,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getSnapshotFiles</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.558">getSnapshotFiles</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.566">getSnapshotFiles</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                                                                                                    org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                                                                                    org.apache.hadoop.fs.Path&nbsp;snapshotDir)
                                                                                                                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -880,7 +913,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getBalancedSplits</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.605">getBalancedSplits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in 
 org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;files,
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.613">getBalancedSplits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in 
 org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;files,
                                                                                                                                   int&nbsp;ngroups)</pre>
 <div class="block">Given a list of file paths and sizes, create around ngroups in as balanced a way as possible.
  The groups created will have similar amounts of bytes.
@@ -896,7 +929,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>runCopyJob</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.790">runCopyJob</a>(org.apache.hadoop.fs.Path&nbsp;inputRoot,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.798">runCopyJob</a>(org.apache.hadoop.fs.Path&nbsp;inputRoot,
                         org.apache.hadoop.fs.Path&nbsp;outputRoot,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
                         org.apache.hadoop.fs.Path&nbsp;snapshotDir,
@@ -924,7 +957,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>verifySnapshot</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.835">verifySnapshot</a>(org.apache.hadoop.conf.Configuration&nbsp;baseConf,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.843">verifySnapshot</a>(org.apache.hadoop.conf.Configuration&nbsp;baseConf,
                             org.apache.hadoop.fs.FileSystem&nbsp;fs,
                             org.apache.hadoop.fs.Path&nbsp;rootDir,
                             org.apache.hadoop.fs.Path&nbsp;snapshotDir)
@@ -935,37 +968,52 @@ implements org.apache.hadoop.util.Tool</pre>
 </dl>
 </li>
 </ul>
-<a name="setOwner-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.lang.String-java.lang.String-boolean-">
+<a name="setConfigParallel-org.apache.hadoop.fs.FileSystem-java.util.List-java.util.function.BiConsumer-org.apache.hadoop.conf.Configuration-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>setConfigParallel</h4>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.853">setConfigParallel</a>(org.apache.hadoop.fs.FileSystem&nbsp;outputFs,
+                               <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;traversedPath,
+                               <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/BiConsumer.html?is-external=true" title="class or interface in java.util.function">BiConsumer</a>&lt;org.apache.hadoop.fs.FileSystem,org.apache.hadoop.fs.Path&gt;&nbsp;task,
+                               org.apache.hadoop.conf.Configuration&nbsp;conf)
+                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
+</li>
+</ul>
+<a name="setOwnerParallel-org.apache.hadoop.fs.FileSystem-java.lang.String-java.lang.String-org.apache.hadoop.conf.Configuration-java.util.List-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setOwner</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.848">setOwner</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
-                      org.apache.hadoop.fs.Path&nbsp;path,
-                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;user,
-                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;group,
-                      boolean&nbsp;recursive)
-               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Set path ownership.</div>
+<h4>setOwnerParallel</h4>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.873">setOwnerParallel</a>(org.apache.hadoop.fs.FileSystem&nbsp;outputFs,
+                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filesUser,
+                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;filesGroup,
+                              org.apache.hadoop.conf.Configuration&nbsp;conf,
+                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;traversedPath)
+                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
 </li>
 </ul>
-<a name="setPermission-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-short-boolean-">
+<a name="setPermissionParallel-org.apache.hadoop.fs.FileSystem-short-java.util.List-org.apache.hadoop.conf.Configuration-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>setPermission</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.863">setPermission</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
-                           org.apache.hadoop.fs.Path&nbsp;path,
-                           short&nbsp;filesMode,
-                           boolean&nbsp;recursive)
-                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Set path permission.</div>
+<h4>setPermissionParallel</h4>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.885">setPermissionParallel</a>(org.apache.hadoop.fs.FileSystem&nbsp;outputFs,
+                                   short&nbsp;filesMode,
+                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;traversedPath,
+                                   org.apache.hadoop.conf.Configuration&nbsp;conf)
+                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
@@ -978,7 +1026,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>processOptions</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.890">processOptions</a>(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine&nbsp;cmd)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.915">processOptions</a>(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine&nbsp;cmd)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractHBaseTool.html#processOptions-org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine-">AbstractHBaseTool</a></code></span></div>
 <div class="block">This method is called to process the options after they have been parsed.</div>
 <dl>
@@ -993,7 +1041,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>doWork</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.915">doWork</a>()
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.940">doWork</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Execute the export snapshot by copying the snapshot metadata, hfiles and wals.</div>
 <dl>
@@ -1012,7 +1060,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>printUsage</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1080">printUsage</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1108">printUsage</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractHBaseTool.html#printUsage--">printUsage</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractHBaseTool.html" title="class in org.apache.hadoop.hbase.util">AbstractHBaseTool</a></code></dd>
@@ -1025,7 +1073,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>addOptions</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1093">addOptions</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1121">addOptions</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractHBaseTool.html#addOptions--">AbstractHBaseTool</a></code></span></div>
 <div class="block">Override this to add command-line options using <a href="../../../../../org/apache/hadoop/hbase/util/AbstractHBaseTool.html#addOptWithArg-java.lang.String-java.lang.String-"><code>AbstractHBaseTool.addOptWithArg(java.lang.String, java.lang.String)</code></a>
  and similar methods.</div>
@@ -1041,7 +1089,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>main</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1108">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1136">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</pre>
 </li>
 </ul>
 </li>

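Aside: the getBalancedSplits javadoc quoted in the ExportSnapshot diff above only states the goal: take a list of (file, size) pairs and build roughly ngroups groups whose byte totals are similar. The sketch below is not the ExportSnapshot implementation; it is a minimal greedy illustration of that idea under the assumption that a largest-first, smallest-group-wins assignment is good enough, and the FileWithSize and balancedSplits names are made up for this example.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

/** Hypothetical stand-in for the (SnapshotFileInfo, size-in-bytes) pairs mentioned in the javadoc above. */
final class FileWithSize {
  final String path;
  final long sizeBytes;

  FileWithSize(String path, long sizeBytes) {
    this.path = path;
    this.sizeBytes = sizeBytes;
  }

  @Override
  public String toString() {
    return path + "(" + sizeBytes + ")";
  }
}

public final class BalancedSplitsSketch {

  /**
   * Greedy illustration only: hand each file, largest first, to the group that
   * currently holds the fewest bytes, so the ngroups groups end up with similar totals.
   */
  static List<List<FileWithSize>> balancedSplits(List<FileWithSize> files, int ngroups) {
    List<List<FileWithSize>> groups = new ArrayList<>();
    long[] groupBytes = new long[ngroups];
    for (int i = 0; i < ngroups; i++) {
      groups.add(new ArrayList<FileWithSize>());
    }
    // Largest files first, so they are spread across groups before small files fill the gaps.
    List<FileWithSize> sorted = new ArrayList<>(files);
    sorted.sort(Comparator.comparingLong((FileWithSize f) -> f.sizeBytes).reversed());
    for (FileWithSize f : sorted) {
      int smallest = 0;
      for (int g = 1; g < ngroups; g++) {
        if (groupBytes[g] < groupBytes[smallest]) {
          smallest = g;  // group with the fewest assigned bytes so far
        }
      }
      groups.get(smallest).add(f);
      groupBytes[smallest] += f.sizeBytes;
    }
    return groups;
  }

  public static void main(String[] args) {
    List<FileWithSize> files = new ArrayList<>();
    files.add(new FileWithSize("hfile-a", 900L));
    files.add(new FileWithSize("hfile-b", 600L));
    files.add(new FileWithSize("hfile-c", 400L));
    files.add(new FileWithSize("hfile-d", 100L));
    // Prints two groups whose byte totals come out equal here (1000 and 1000).
    System.out.println(balancedSplits(files, 2));
  }
}

Sorting by descending size before assigning keeps any single large hfile from skewing one group; the real method may well use a different strategy (for example a priority queue keyed on group size), so treat this purely as a reading aid for the description in the diff.
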
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 2b64b99..41c2191 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -198,8 +198,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftServerRunner.ImplType</span></a></li>
 </ul>
 </li>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndBlockWeight(hosts, len);<a name

<TRUNCATED>
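
The computeHDFSBlocksDistribution() shown (truncated) above aggregates each BlockLocation's hosts and length into an HDFSBlocksDistribution. A typical use, assuming the usual HDFSBlocksDistribution accessors and with fs, storeFilePath and localHostname as placeholders:

FileStatus status = fs.getFileStatus(storeFilePath);
HDFSBlocksDistribution dist =
    FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());
float locality = dist.getBlockLocalityIndex(localHostname); // fraction of the file's bytes on this host
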

[21/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
index 02dbc37..2547651 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
@@ -26,192 +26,198 @@
 <span class="sourceLineNo">018</span> */<a name="line.18"></a>
 <span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.regionserver;<a name="line.19"></a>
 <span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import java.io.IOException;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import java.io.InterruptedIOException;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import java.net.ConnectException;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.net.SocketTimeoutException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.slf4j.Logger;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.slf4j.LoggerFactory;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FileSystem;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.Server;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.ExceptionUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.40"></a>
-<span class="sourceLineNo">041</span><a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>/**<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * This worker is spawned in every regionserver, including master. The Worker waits for log<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * running in the master and races with other workers in other serves to acquire those tasks.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * The coordination is done via coordination engine.<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * &lt;p&gt;<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * It keeps heart beating the manager by periodically moving the task from UNASSIGNED to OWNED<a name="line.51"></a>
-<span class="sourceLineNo">052</span> * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to<a name="line.53"></a>
-<span class="sourceLineNo">054</span> * RESIGNED.<a name="line.54"></a>
-<span class="sourceLineNo">055</span> * &lt;p&gt;<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * the absence of a global lock there is a unavoidable race here - a worker might have just finished<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * its task when it is stripped of its ownership. Here we rely on the idempotency of the log<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * splitting task for correctness<a name="line.59"></a>
-<span class="sourceLineNo">060</span> */<a name="line.60"></a>
-<span class="sourceLineNo">061</span>@InterfaceAudience.Private<a name="line.61"></a>
-<span class="sourceLineNo">062</span>public class SplitLogWorker implements Runnable {<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  Thread worker;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  // thread pool which executes recovery work<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private SplitLogWorkerCoordination coordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  private Configuration conf;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  private RegionServerServices server;<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      TaskExecutor splitTaskExecutor) {<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.server = server;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.conf = conf;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    coordination.init(server, conf, splitTaskExecutor, this);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public SplitLogWorker(final Server hserver, final Configuration conf,<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      final RegionServerServices server, final LastSequenceId sequenceIdChecker,<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      final WALFactory factory) {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    this(hserver, conf, server, new TaskExecutor() {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      public Status exec(String filename, CancelableProgressable p) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>        Path walDir;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>        FileSystem fs;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>        try {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>          walDir = FSUtils.getWALRootDir(conf);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>          fs = walDir.getFileSystem(conf);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        } catch (IOException e) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>          LOG.warn("could not find root dir or fs", e);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>          return Status.RESIGNED;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>        }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>        // TODO have to correctly figure out when log splitting has been<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        // interrupted or has encountered a transient error and when it has<a name="line.96"></a>
-<span class="sourceLineNo">097</span>        // encountered a bad non-retry-able persistent error.<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        try {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>          if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)),<a name="line.99"></a>
-<span class="sourceLineNo">100</span>            fs, conf, p, sequenceIdChecker,<a name="line.100"></a>
-<span class="sourceLineNo">101</span>              server.getCoordinatedStateManager().getSplitLogWorkerCoordination(), factory)) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>            return Status.PREEMPTED;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          }<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        } catch (InterruptedIOException iioe) {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>          return Status.RESIGNED;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>        } catch (IOException e) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          Throwable cause = e.getCause();<a name="line.108"></a>
-<span class="sourceLineNo">109</span>          if (e instanceof RetriesExhaustedException &amp;&amp; (cause instanceof NotServingRegionException<a name="line.109"></a>
-<span class="sourceLineNo">110</span>                  || cause instanceof ConnectException<a name="line.110"></a>
-<span class="sourceLineNo">111</span>                  || cause instanceof SocketTimeoutException)) {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>            LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "<a name="line.112"></a>
-<span class="sourceLineNo">113</span>                + "resigning", e);<a name="line.113"></a>
-<span class="sourceLineNo">114</span>            return Status.RESIGNED;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          } else if (cause instanceof InterruptedException) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>            LOG.warn("log splitting of " + filename + " interrupted, resigning", e);<a name="line.116"></a>
-<span class="sourceLineNo">117</span>            return Status.RESIGNED;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>          }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>          LOG.warn("log splitting of " + filename + " failed, returning error", e);<a name="line.119"></a>
-<span class="sourceLineNo">120</span>          return Status.ERR;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>        }<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        return Status.DONE;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      }<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    });<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  @Override<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public void run() {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    try {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      LOG.info("SplitLogWorker " + server.getServerName() + " starting");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      coordination.registerListener();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      // wait for Coordination Engine is ready<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      boolean res = false;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      while (!res &amp;&amp; !coordination.isStop()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>        res = coordination.isReady();<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      }<a name="line.136"></a>
-<span class="sourceLineNo">137</span>      if (!coordination.isStop()) {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        coordination.taskLoop();<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    } catch (Throwable t) {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      if (ExceptionUtil.isInterrupt(t)) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :<a name="line.142"></a>
-<span class="sourceLineNo">143</span>            " (ERROR: exitWorker is not set, exiting anyway)"));<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      } else {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>        // only a logical error can cause here. Printing it out<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        // to make debugging easier<a name="line.146"></a>
-<span class="sourceLineNo">147</span>        LOG.error("unexpected error ", t);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    } finally {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      coordination.removeListener();<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /**<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * If the worker is doing a task i.e. splitting a log file then stop the task.<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   * It doesn't exit the worker thread.<a name="line.157"></a>
-<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public void stopTask() {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    LOG.info("Sending interrupt to stop the worker thread");<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * start the SplitLogWorker thread<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public void start() {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    worker.start();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /**<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * stop the SplitLogWorker thread<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  public void stop() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    coordination.stopProcessingTasks();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    stopTask();<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>  /**<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * Objects implementing this interface actually do the task that has been<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight<a name="line.182"></a>
-<span class="sourceLineNo">183</span>   * guarantee that two workers will not be executing the same task therefore it<a name="line.183"></a>
-<span class="sourceLineNo">184</span>   * is better to have workers prepare the task and then have the<a name="line.184"></a>
-<span class="sourceLineNo">185</span>   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * SplitLogManager.TaskFinisher<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  public interface TaskExecutor {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    enum Status {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      DONE(),<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      ERR(),<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      RESIGNED(),<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      PREEMPTED()<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    Status exec(String name, CancelableProgressable p);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /**<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * Returns the number of tasks processed by coordination.<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * This method is used by tests only<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  @VisibleForTesting<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  public int getTaskReadySeq() {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    return coordination.getTaskReadySeq();<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>}<a name="line.206"></a>
+<span class="sourceLineNo">021</span>import java.io.FileNotFoundException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.IOException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.InterruptedIOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.net.ConnectException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.net.SocketTimeoutException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span><a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.slf4j.Logger;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.slf4j.LoggerFactory;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.conf.Configuration;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FileSystem;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.Path;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Server;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.ExceptionUtil;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.41"></a>
+<span class="sourceLineNo">042</span><a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>/**<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * This worker is spawned in every regionserver, including master. The Worker waits for log<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * running in the master and races with other workers in other serves to acquire those tasks.<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * The coordination is done via coordination engine.<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * &lt;p&gt;<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * It keeps heart beating the manager by periodically moving the task from UNASSIGNED to OWNED<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * RESIGNED.<a name="line.55"></a>
+<span class="sourceLineNo">056</span> * &lt;p&gt;<a name="line.56"></a>
+<span class="sourceLineNo">057</span> * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * the absence of a global lock there is a unavoidable race here - a worker might have just finished<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * its task when it is stripped of its ownership. Here we rely on the idempotency of the log<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * splitting task for correctness<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class SplitLogWorker implements Runnable {<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  Thread worker;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  // thread pool which executes recovery work<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  private SplitLogWorkerCoordination coordination;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private Configuration conf;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  private RegionServerServices server;<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      TaskExecutor splitTaskExecutor) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    this.server = server;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    this.conf = conf;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    coordination.init(server, conf, splitTaskExecutor, this);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  public SplitLogWorker(final Server hserver, final Configuration conf,<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      final RegionServerServices server, final LastSequenceId sequenceIdChecker,<a name="line.82"></a>
+<span class="sourceLineNo">083</span>      final WALFactory factory) {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    this(hserver, conf, server, new TaskExecutor() {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      @Override<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      public Status exec(String filename, CancelableProgressable p) {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>        Path walDir;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>        FileSystem fs;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        try {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>          walDir = FSUtils.getWALRootDir(conf);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>          fs = walDir.getFileSystem(conf);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        } catch (IOException e) {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>          LOG.warn("could not find root dir or fs", e);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>          return Status.RESIGNED;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>        }<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        // TODO have to correctly figure out when log splitting has been<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        // interrupted or has encountered a transient error and when it has<a name="line.97"></a>
+<span class="sourceLineNo">098</span>        // encountered a bad non-retry-able persistent error.<a name="line.98"></a>
+<span class="sourceLineNo">099</span>        try {<a name="line.99"></a>
+<span class="sourceLineNo">100</span>          if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)),<a name="line.100"></a>
+<span class="sourceLineNo">101</span>            fs, conf, p, sequenceIdChecker,<a name="line.101"></a>
+<span class="sourceLineNo">102</span>              server.getCoordinatedStateManager().getSplitLogWorkerCoordination(), factory)) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>            return Status.PREEMPTED;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          }<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        } catch (InterruptedIOException iioe) {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          return Status.RESIGNED;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>        } catch (IOException e) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>          if (e instanceof FileNotFoundException) {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>            // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.110"></a>
+<span class="sourceLineNo">111</span>            LOG.warn("WAL {} does not exist anymore", filename, e);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>            return Status.DONE;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>          }<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          Throwable cause = e.getCause();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>          if (e instanceof RetriesExhaustedException &amp;&amp; (cause instanceof NotServingRegionException<a name="line.115"></a>
+<span class="sourceLineNo">116</span>                  || cause instanceof ConnectException<a name="line.116"></a>
+<span class="sourceLineNo">117</span>                  || cause instanceof SocketTimeoutException)) {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>            LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "<a name="line.118"></a>
+<span class="sourceLineNo">119</span>                + "resigning", e);<a name="line.119"></a>
+<span class="sourceLineNo">120</span>            return Status.RESIGNED;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>          } else if (cause instanceof InterruptedException) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>            LOG.warn("log splitting of " + filename + " interrupted, resigning", e);<a name="line.122"></a>
+<span class="sourceLineNo">123</span>            return Status.RESIGNED;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>          LOG.warn("log splitting of " + filename + " failed, returning error", e);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>          return Status.ERR;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        return Status.DONE;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    });<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void run() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    try {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      LOG.info("SplitLogWorker " + server.getServerName() + " starting");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>      coordination.registerListener();<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      // wait for Coordination Engine is ready<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      boolean res = false;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      while (!res &amp;&amp; !coordination.isStop()) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        res = coordination.isReady();<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      }<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!coordination.isStop()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        coordination.taskLoop();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    } catch (Throwable t) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      if (ExceptionUtil.isInterrupt(t)) {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :<a name="line.148"></a>
+<span class="sourceLineNo">149</span>            " (ERROR: exitWorker is not set, exiting anyway)"));<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      } else {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        // only a logical error can cause here. Printing it out<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        // to make debugging easier<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        LOG.error("unexpected error ", t);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    } finally {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      coordination.removeListener();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  /**<a name="line.161"></a>
+<span class="sourceLineNo">162</span>   * If the worker is doing a task i.e. splitting a log file then stop the task.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>   * It doesn't exit the worker thread.<a name="line.163"></a>
+<span class="sourceLineNo">164</span>   */<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  public void stopTask() {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    LOG.info("Sending interrupt to stop the worker thread");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
+<span class="sourceLineNo">169</span><a name="line.169"></a>
+<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   * start the SplitLogWorker thread<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  public void start() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    worker.start();<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  /**<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * stop the SplitLogWorker thread<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   */<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  public void stop() {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    coordination.stopProcessingTasks();<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    stopTask();<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * Objects implementing this interface actually do the task that has been<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * guarantee that two workers will not be executing the same task therefore it<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * is better to have workers prepare the task and then have the<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * SplitLogManager.TaskFinisher<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  public interface TaskExecutor {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    enum Status {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      DONE(),<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      ERR(),<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      RESIGNED(),<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      PREEMPTED()<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    Status exec(String name, CancelableProgressable p);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>  /**<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * Returns the number of tasks processed by coordination.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * This method is used by tests only<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   */<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  @VisibleForTesting<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public int getTaskReadySeq() {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    return coordination.getTaskReadySeq();<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>}<a name="line.212"></a>
 
 
 


[24/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html b/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
index f632363..7a332b1 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
@@ -290,7 +290,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html" title
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html" title="class in org.apache.hadoop.hbase.util">FSUtils</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#addToHDFSBlocksDistribution-org.apache.hadoop.hbase.HDFSBlocksDistribution-org.apache.hadoop.fs.BlockLocation:A-">addToHDFSBlocksDistribution</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkAccess-org.apache.hadoop.security.UserGroupInformation-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.fs.permission.FsAction-">checkAccess</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkClusterIdExists-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-int-">checkClusterIdExists</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkDfsSafeMode-org.apache.hadoop.conf.Configuration-">checkDfsSafeMode</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkFileSystemAvailable-org.apache.hadoop.fs.FileSystem-">checkFileSystemAvailable</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkShortCircuitR
 eadBufferSize-org.apache.hadoop.conf.Configuration-">checkShortCircuitReadBufferSize</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-boolean-">checkVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-boolean-int-int-">checkVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#computeHDFSBlocksDistribution-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.FileStatus-long-long-">computeHDFSBlocksDistribution</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#create-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.permission.FsPermission-java.net.InetSocketAddress:A-">create</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#deleteRegionDir-org.apache.hadoop.conf.C
 onfiguration-org.apache.hadoop.hbase.HRegionInfo-">deleteRegionDir</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#filterFileStatuses-org.apache.hadoop.fs.FileStatus:A-org.apache.hadoop.hbase.util.FileStatusFilter-">filterFileStatuses</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#filterFileStatuses-java.util.Iterator-org.apache.hadoop.hbase.util.FileStatusFilter-">filterFileStatuses</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getClusterId-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getDFSHedgedReadMetrics-org.apache.hadoop.conf.Configuration-">getDFSHedgedReadMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getFamilyDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getFamilyDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getInstance-org.ap
 ache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-">getInstance</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getLocalTableDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getLocalTableDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getReferenceFilePaths-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getReferenceFilePaths</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDegreeLocalityMappingFromFS-org.apache.hadoop.conf.Configuration-">getRegionDegreeLocalityMappingFromFS</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDegreeLocalityMappingFromFS-org.apache.hadoop.conf.Configuration-java.lang.String-int-">getRegionDegreeLocalityMappingFromFS</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">getRegionDir</a>, <a href="../../..
 /../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getRegionDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionReferenceFileCount-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getRegionReferenceFileCount</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableFragmentation-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableFragmentation</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableFragmentation-org.apache.hadoop.hbase.master.HMaster-">getTableFragmentation</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableStoreFilePathMap</a>,
  <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.PathFilter-java.util.concurrent.ExecutorService-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-java.util.Map-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-java.util.Map-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-org.apache.hadoop.fs.PathFilter-java.util.concurrent.ExecutorService-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTotalTableFragmentation-org.apache.hadoop.hb
 ase.master.HMaster-">getTotalTableFragmentation</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isDistributedFileSystem-org.apache.hadoop.fs.FileSystem-">isDistributedFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isMatchingTail-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">isMatchingTail</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#listStatusWithStatusFilter-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.util.FileStatusFilter-">listStatusWithStatusFilter</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#metaRegionExists-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">metaRegionExists</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#parseVersionFrom-byte
 :A-">parseVersionFrom</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setClusterId-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.ClusterId-int-">setClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setupShortCircuitRead-org.apache.hadoop.conf.Configuration-">setupShortCircuitRead</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">setVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-int-int-">setVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.lang.String-int-int-">setVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#toVersionByteArray-java.lang.String-">toVersionByteArray</a>, <
 a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#waitOnSafeMode-org.apache.hadoop.conf.Configuration-long-">waitOnSafeMode</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#addToHDFSBlocksDistribution-org.apache.hadoop.hbase.HDFSBlocksDistribution-org.apache.hadoop.fs.BlockLocation:A-">addToHDFSBlocksDistribution</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkAccess-org.apache.hadoop.security.UserGroupInformation-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.fs.permission.FsAction-">checkAccess</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkClusterIdExists-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-int-">checkClusterIdExists</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkDfsSafeMode-org.apache.hadoop.conf.Configuration-">checkDfsSafeMode</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkFileSystemAvailable-org.apache.hadoop.fs.FileSystem-">checkFileSystemAvailable</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkShortCircuitR
 eadBufferSize-org.apache.hadoop.conf.Configuration-">checkShortCircuitReadBufferSize</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-boolean-">checkVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#checkVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-boolean-int-int-">checkVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#computeHDFSBlocksDistribution-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.FileStatus-long-long-">computeHDFSBlocksDistribution</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#copyFilesParallel-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-int-">copyFilesParallel</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#create-org.apache.hadoop.conf.
 Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.permission.FsPermission-java.net.InetSocketAddress:A-">create</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#deleteRegionDir-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.HRegionInfo-">deleteRegionDir</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#filterFileStatuses-org.apache.hadoop.fs.FileStatus:A-org.apache.hadoop.hbase.util.FileStatusFilter-">filterFileStatuses</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#filterFileStatuses-java.util.Iterator-org.apache.hadoop.hbase.util.FileStatusFilter-">filterFileStatuses</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getClusterId-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getDFSHedgedReadMetrics-org.apache.hadoop.conf.Configuration-">getDFS
 HedgedReadMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getFamilyDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getFamilyDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getInstance-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-">getInstance</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getLocalTableDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getLocalTableDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getReferenceFilePaths-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getReferenceFilePaths</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDegreeLocalityMappingFromFS-org.apache.hadoop.conf.Configuration-">getRegionDegreeLocalityMappingFromFS</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDegreeLocalityMappingFromFS-org.apache.hadoop.co
 nf.Configuration-java.lang.String-int-">getRegionDegreeLocalityMappingFromFS</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">getRegionDir</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getRegionDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionReferenceFileCount-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getRegionReferenceFileCount</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableDirs</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableFragmentation-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableFragmentation</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getT
 ableFragmentation-org.apache.hadoop.hbase.master.HMaster-">getTableFragmentation</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.PathFilter-java.util.concurrent.ExecutorService-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-java.util.Map-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-java.util.Map-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-o
 rg.apache.hadoop.fs.PathFilter-java.util.concurrent.ExecutorService-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-">getTableStoreFilePathMap</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTotalTableFragmentation-org.apache.hadoop.hbase.master.HMaster-">getTotalTableFragmentation</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isDistributedFileSystem-org.apache.hadoop.fs.FileSystem-">isDistributedFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isMatchingTail-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">isMatchingTail</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#listStatusWithStatusFilter-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.util.FileStatusFilter-">listStatusWithStat
 usFilter</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#metaRegionExists-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">metaRegionExists</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#parseVersionFrom-byte:A-">parseVersionFrom</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setClusterId-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.ClusterId-int-">setClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setupShortCircuitRead-org.apache.hadoop.conf.Configuration-">setupShortCircuitRead</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">setVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-int-int-">setVersion</a>, <a href="../../../../../org/apache/hadoop
 /hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.lang.String-int-int-">setVersion</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#toVersionByteArray-java.lang.String-">toVersionByteArray</a>, <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#waitOnSafeMode-org.apache.hadoop.conf.Configuration-long-">waitOnSafeMode</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.util.CommonFSUtils">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
index 7fe5594..b8d07ef 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.868">FSUtils.BlackListDirFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.870">FSUtils.BlackListDirFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html" title="class in org.apache.hadoop.hbase.util">AbstractFileStatusFilter</a></pre>
 <div class="block">Directory filter that doesn't include any of the directories in the specified blacklist</div>
 </li>
@@ -236,7 +236,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.869">fs</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.871">fs</a></pre>
 </li>
 </ul>
 <a name="blacklist">
@@ -245,7 +245,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>blacklist</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.870">blacklist</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.872">blacklist</a></pre>
 </li>
 </ul>
 </li>
@@ -262,7 +262,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BlackListDirFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.879">BlackListDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.881">BlackListDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;directoryNameBlackList)</pre>
 <div class="block">Create a filter on the givem filesystem with the specified blacklist</div>
 <dl>
@@ -287,7 +287,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockList">
 <li class="blockList">
 <h4>accept</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.887">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.889">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
                          @CheckForNull
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isDir)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html#accept-org.apache.hadoop.fs.Path-java.lang.Boolean-">AbstractFileStatusFilter</a></code></span></div>
@@ -310,7 +310,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isValidName</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.901">isValidName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#line.903">isValidName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 </li>
 </ul>
 </li>
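
This hunk only shifts the source-line anchors; the class is still a directory filter that rejects any directory whose name appears in a caller-supplied blacklist. As a rough stand-alone illustration of the same idea using Hadoop's plain PathFilter interface (the class below is hypothetical, not the HBase implementation):

  import java.io.IOException;
  import java.util.List;

  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.PathFilter;

  /** Accepts only directories whose final path component is not on the blacklist. */
  public class BlacklistedDirectoryFilter implements PathFilter {

    private final FileSystem fs;
    private final List<String> blacklist;

    public BlacklistedDirectoryFilter(FileSystem fs, List<String> directoryNameBlacklist) {
      this.fs = fs;
      this.blacklist = directoryNameBlacklist;
    }

    @Override
    public boolean accept(Path p) {
      if (blacklist.contains(p.getName())) {
        return false; // the directory name itself is blacklisted
      }
      try {
        return fs.getFileStatus(p).isDirectory(); // non-directories are filtered out too
      } catch (IOException e) {
        return false; // paths we cannot stat are skipped rather than propagated
      }
    }
  }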

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
index 74eb003..2bfe852 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html
@@ -121,7 +121,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.909">FSUtils.DirFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.911">FSUtils.DirFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html" title="class in org.apache.hadoop.hbase.util">FSUtils.BlackListDirFilter</a></pre>
 <div class="block">A <code>PathFilter</code> that only allows directories.</div>
 </li>
@@ -194,7 +194,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDi
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DirFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html#line.911">DirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.DirFilter.html#line.913">DirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
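
[Editor's note, not part of the patch: a matching sketch for FSUtils.DirFilter, which per the javadoc above only lets directories through. It assumes the one-argument constructor shown and the same listStatus(Path, PathFilter) overload as the previous sketch.]

  import java.io.IOException;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class DirFilterSketch {
    /** Returns the number of immediate subdirectories of dir; regular files are filtered out. */
    static int countSubdirs(FileSystem fs, Path dir) throws IOException {
      FileStatus[] dirsOnly = fs.listStatus(dir, new FSUtils.DirFilter(fs));
      return dirsOnly.length;
    }
  }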

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
index 890b251..15497d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1039">FSUtils.FamilyDirFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1041">FSUtils.FamilyDirFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html" title="class in org.apache.hadoop.hbase.util">AbstractFileStatusFilter</a></pre>
 <div class="block">Filter for all dirs that are legal column family names.  This is generally used for colfam
  dirs &lt;hbase.rootdir&gt;/&lt;tabledir&gt;/&lt;regiondir&gt;/&lt;colfamdir&gt;.</div>
@@ -222,7 +222,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html#line.1040">fs</a></pre>
+<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html#line.1042">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -239,7 +239,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FamilyDirFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html#line.1042">FamilyDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html#line.1044">FamilyDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -256,7 +256,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>accept</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html#line.1047">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html#line.1049">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
                          @CheckForNull
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isDir)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html#accept-org.apache.hadoop.fs.Path-java.lang.Boolean-">AbstractFileStatusFilter</a></code></span></div>
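
[Editor's note, not part of the patch: a sketch for FSUtils.FamilyDirFilter, which the javadoc above aims at the <hbase.rootdir>/<tabledir>/<regiondir>/<colfamdir> layout. The region directory passed in is a hypothetical parameter; constructor signature as shown above.]

  import java.io.IOException;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class FamilyDirFilterSketch {
    /** Prints the column-family directories directly under a region directory. */
    static void printFamilyDirs(FileSystem fs, Path regionDir) throws IOException {
      for (FileStatus cf : fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs))) {
        System.out.println("column family dir: " + cf.getPath().getName());
      }
    }
  }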

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
index 6b41ec4..71f43c7 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.847">FSUtils.FileFilter</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.849">FSUtils.FileFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html" title="class in org.apache.hadoop.hbase.util">AbstractFileStatusFilter</a></pre>
 <div class="block">A <code>PathFilter</code> that returns only regular files.</div>
 </li>
@@ -221,7 +221,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html#line.848">fs</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html#line.850">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -238,7 +238,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html#line.850">FileFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html#line.852">FileFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -255,7 +255,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>accept</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html#line.855">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html#line.857">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
                          @CheckForNull
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isDir)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html#accept-org.apache.hadoop.fs.Path-java.lang.Boolean-">AbstractFileStatusFilter</a></code></span></div>
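
[Editor's note, not part of the patch: FSUtils.FileFilter above is package-private, so code outside org.apache.hadoop.hbase.util cannot construct it. An equivalent effect (regular files only) can be had by checking FileStatus.isFile() after a plain listing; the sketch below shows that equivalent, not the class itself.]

  import java.io.IOException;
  import java.util.ArrayList;
  import java.util.List;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class RegularFilesOnlySketch {
    /** Collects only regular files (no directories) directly under dir. */
    static List<Path> regularFiles(FileSystem fs, Path dir) throws IOException {
      List<Path> files = new ArrayList<>();
      for (FileStatus stat : fs.listStatus(dir)) {
        if (stat.isFile()) {
          files.add(stat.getPath());
        }
      }
      return files;
    }
  }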

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
index b915f9e..7055cca 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1101">FSUtils.HFileFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1103">FSUtils.HFileFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html" title="class in org.apache.hadoop.hbase.util">AbstractFileStatusFilter</a></pre>
 <div class="block">Filter for HFiles that excludes reference files.</div>
 </li>
@@ -221,7 +221,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html#line.1102">fs</a></pre>
+<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html#line.1104">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -238,7 +238,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html#line.1104">HFileFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html#line.1106">HFileFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -255,7 +255,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>accept</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html#line.1109">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html#line.1111">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
                          @CheckForNull
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isDir)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html#accept-org.apache.hadoop.fs.Path-java.lang.Boolean-">AbstractFileStatusFilter</a></code></span></div>
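
[Editor's note, not part of the patch: a sketch for FSUtils.HFileFilter, described above as selecting HFiles while excluding reference files. It assumes the one-argument constructor shown; the family directory is a hypothetical parameter.]

  import java.io.IOException;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class HFileFilterSketch {
    /** Prints the HFiles (reference files excluded) under a column-family directory. */
    static void printHFiles(FileSystem fs, Path familyDir) throws IOException {
      for (FileStatus hf : fs.listStatus(familyDir, new FSUtils.HFileFilter(fs))) {
        System.out.println(hf.getPath());
      }
    }
  }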

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
index e7c0669..7fc3f76 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1128">FSUtils.HFileLinkFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1130">FSUtils.HFileLinkFilter</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements org.apache.hadoop.fs.PathFilter</pre>
 <div class="block">Filter for HFileLinks (StoreFiles and HFiles not included).
@@ -189,7 +189,7 @@ implements org.apache.hadoop.fs.PathFilter</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HFileLinkFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html#line.1128">HFileLinkFilter</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html#line.1130">HFileLinkFilter</a>()</pre>
 </li>
 </ul>
 </li>
@@ -206,7 +206,7 @@ implements org.apache.hadoop.fs.PathFilter</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>accept</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html#line.1131">accept</a>(org.apache.hadoop.fs.Path&nbsp;p)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html#line.1133">accept</a>(org.apache.hadoop.fs.Path&nbsp;p)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>accept</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.fs.PathFilter</code></dd>
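
[Editor's note, not part of the patch: FSUtils.HFileLinkFilter is a plain org.apache.hadoop.fs.PathFilter with a no-argument constructor, so besides listStatus it can be applied to a single path directly. A hypothetical sketch:]

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class HFileLinkFilterSketch {
    /** True if the given path names an HFileLink (per the javadoc, StoreFiles and plain HFiles do not match). */
    static boolean isHFileLink(Path p) {
      return new FSUtils.HFileLinkFilter().accept(p);
    }
  }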

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
index 8f8588b..b885a2a 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1136">FSUtils.ReferenceFileFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1138">FSUtils.ReferenceFileFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html" title="class in org.apache.hadoop.hbase.util">AbstractFileStatusFilter</a></pre>
 </li>
 </ul>
@@ -220,7 +220,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html#line.1138">fs</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html#line.1140">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -237,7 +237,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ReferenceFileFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html#line.1140">ReferenceFileFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html#line.1142">ReferenceFileFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -254,7 +254,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>accept</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html#line.1145">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html#line.1147">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
                          @CheckForNull
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isDir)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html#accept-org.apache.hadoop.fs.Path-java.lang.Boolean-">AbstractFileStatusFilter</a></code></span></div>
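
[Editor's note, not part of the patch: a sketch for FSUtils.ReferenceFileFilter which, going by its name, selects the reference files that the HFileFilter example above excludes. Constructor signature as shown; the directory is a hypothetical parameter.]

  import java.io.IOException;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class ReferenceFileFilterSketch {
    /** Returns how many reference files sit directly under a column-family directory. */
    static int countReferenceFiles(FileSystem fs, Path familyDir) throws IOException {
      return fs.listStatus(familyDir, new FSUtils.ReferenceFileFilter(fs)).length;
    }
  }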

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
index 7aaf203..590afd1 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.984">FSUtils.RegionDirFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.986">FSUtils.RegionDirFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html" title="class in org.apache.hadoop.hbase.util">AbstractFileStatusFilter</a></pre>
 <div class="block">Filter for all dirs that don't start with '.'</div>
 </li>
@@ -225,7 +225,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockList">
 <li class="blockList">
 <h4>regionDirPattern</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true" title="class or interface in java.util.regex">Pattern</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.986">regionDirPattern</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true" title="class or interface in java.util.regex">Pattern</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.988">regionDirPattern</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -234,7 +234,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.987">fs</a></pre>
+<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.989">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -251,7 +251,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RegionDirFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.989">RegionDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.991">RegionDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -268,7 +268,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusF
 <ul class="blockListLast">
 <li class="blockList">
 <h4>accept</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.994">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html#line.996">accept</a>(org.apache.hadoop.fs.Path&nbsp;p,
                          @CheckForNull
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&nbsp;isDir)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/AbstractFileStatusFilter.html#accept-org.apache.hadoop.fs.Path-java.lang.Boolean-">AbstractFileStatusFilter</a></code></span></div>
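
[Editor's note, not part of the patch: a sketch for FSUtils.RegionDirFilter, used here to pick out region directories under a table directory. It assumes the constructor shown above and that FSUtils.getRootDir/getTableDir are accessible static helpers, as the FSUtils source further down uses them; the table name "t1" is hypothetical.]

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class RegionDirFilterSketch {
    /** Prints the region directories of a (hypothetical) table named "t1". */
    static void printRegionDirs(Configuration conf) throws IOException {
      Path rootDir = FSUtils.getRootDir(conf);
      FileSystem fs = rootDir.getFileSystem(conf);
      Path tableDir = FSUtils.getTableDir(rootDir, TableName.valueOf("t1"));
      for (FileStatus rd : fs.listStatus(tableDir, new FSUtils.RegionDirFilter(fs))) {
        System.out.println("region dir: " + rd.getPath().getName());
      }
    }
  }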

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
index 2de792d..d55f44d 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.920">FSUtils.UserTableDirFilter</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.922">FSUtils.UserTableDirFilter</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html" title="class in org.apache.hadoop.hbase.util">FSUtils.BlackListDirFilter</a></pre>
 <div class="block">A <code>PathFilter</code> that returns usertable directories. To get all directories use the
  <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html" title="class in org.apache.hadoop.hbase.util"><code>FSUtils.BlackListDirFilter</code></a> with a <tt>null</tt> blacklist</div>
@@ -212,7 +212,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDi
 <ul class="blockListLast">
 <li class="blockList">
 <h4>UserTableDirFilter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html#line.921">UserTableDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html#line.923">UserTableDirFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</pre>
 </li>
 </ul>
 </li>
@@ -229,7 +229,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDi
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isValidName</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html#line.926">isValidName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html#line.928">isValidName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html#isValidName-java.lang.String-">isValidName</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html" title="class in org.apache.hadoop.hbase.util">FSUtils.BlackListDirFilter</a></code></dd>
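
[Editor's note, not part of the patch: finally, a sketch for FSUtils.UserTableDirFilter, which per the javadoc above returns user-table directories (a BlackListDirFilter with a null blacklist would return all directories instead). The base directory is left as a hypothetical parameter so no particular on-disk layout is assumed.]

  import java.io.IOException;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.util.FSUtils;

  public class UserTableDirFilterSketch {
    /**
     * Prints the user-table directories directly under baseDir, where baseDir is whatever
     * directory holds table directories in your layout (hypothetical parameter).
     */
    static void printUserTableDirs(FileSystem fs, Path baseDir) throws IOException {
      for (FileStatus td : fs.listStatus(baseDir, new FSUtils.UserTableDirFilter(fs))) {
        System.out.println("table dir: " + td.getPath().getName());
      }
    }
  }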


[09/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FileFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True if &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True if &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndBlockWeight(hosts, len);<a

<TRUNCATED>

[13/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        LOG.error("Unable to get the status for source file=" + fileInfo.

<TRUNCATED>

[23/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
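The diff below renumbers the FSUtils method summary table and adds two new entries, copyFiles and copyFilesParallel. Based only on the copyFilesParallel signature shown in that table (so this is an assumed usage sketch, not documented behaviour), a caller would look roughly like:

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class CopyFilesParallelSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path src = new Path("hdfs://src-cluster/hbase/archive/some-dir");   // hypothetical source
        Path dst = new Path("hdfs://dst-cluster/hbase/archive/some-dir");   // hypothetical destination
        FileSystem srcFs = src.getFileSystem(conf);
        FileSystem dstFs = dst.getFileSystem(conf);
        // Signature from the summary table: (srcFS, src, dstFS, dst, conf, threads) -> List<Path>.
        List<Path> result = FSUtils.copyFilesParallel(srcFs, src, dstFs, dst, conf, 4);
        System.out.println("copyFilesParallel returned " + result.size() + " paths");
      }
    }
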
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
index e7187df..c62b5fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":6,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":6,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.102">FSUtils</a>
+public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.104">FSUtils</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html" title="class in org.apache.hadoop.hbase.util">CommonFSUtils</a></pre>
 <div class="block">Utility methods for interacting with the underlying file system.</div>
 </li>
@@ -347,6 +347,25 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;user)</code>&nbsp;</td>
 </tr>
 <tr id="i10" class="altColor">
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#copyFiles-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-java.util.concurrent.ExecutorService-java.util.List-">copyFiles</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;srcFS,
+         org.apache.hadoop.fs.Path&nbsp;src,
+         org.apache.hadoop.fs.FileSystem&nbsp;dstFS,
+         org.apache.hadoop.fs.Path&nbsp;dst,
+         org.apache.hadoop.conf.Configuration&nbsp;conf,
+         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;pool,
+         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true" title="class or interface in java.util.concurrent">Future</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;&gt;&nbsp;futures)</code>&nbsp;</td>
+</tr>
+<tr id="i11" class="rowColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#copyFilesParallel-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-int-">copyFilesParallel</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;srcFS,
+                 org.apache.hadoop.fs.Path&nbsp;src,
+                 org.apache.hadoop.fs.FileSystem&nbsp;dstFS,
+                 org.apache.hadoop.fs.Path&nbsp;dst,
+                 org.apache.hadoop.conf.Configuration&nbsp;conf,
+                 int&nbsp;threads)</code>&nbsp;</td>
+</tr>
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.FSDataOutputStream</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#create-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.permission.FsPermission-java.net.InetSocketAddress:A-">create</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
       org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -356,61 +375,61 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <div class="block">Create the specified file on the filesystem.</div>
 </td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#deleteRegionDir-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.HRegionInfo-">deleteRegionDir</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a>&nbsp;hri)</code>
 <div class="block">Delete the region directory if exists.</div>
 </td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#filterFileStatuses-org.apache.hadoop.fs.FileStatus:A-org.apache.hadoop.hbase.util.FileStatusFilter-">filterFileStatuses</a></span>(org.apache.hadoop.fs.FileStatus[]&nbsp;input,
                   <a href="../../../../../org/apache/hadoop/hbase/util/FileStatusFilter.html" title="interface in org.apache.hadoop.hbase.util">FileStatusFilter</a>&nbsp;filter)</code>
 <div class="block">Filters FileStatuses in an array and returns a list</div>
 </td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#filterFileStatuses-java.util.Iterator-org.apache.hadoop.hbase.util.FileStatusFilter-">filterFileStatuses</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;input,
                   <a href="../../../../../org/apache/hadoop/hbase/util/FileStatusFilter.html" title="interface in org.apache.hadoop.hbase.util">FileStatusFilter</a>&nbsp;filter)</code>
 <div class="block">Filters FileStatuses in an iterator and returns a list</div>
 </td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getClusterId-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getClusterId</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
             org.apache.hadoop.fs.Path&nbsp;rootdir)</code>
 <div class="block">Returns the value of the unique cluster ID stored for this HBase instance.</div>
 </td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>static org.apache.hadoop.hdfs.DFSHedgedReadMetrics</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getDFSHedgedReadMetrics-org.apache.hadoop.conf.Configuration-">getDFSHedgedReadMetrics</a></span>(org.apache.hadoop.conf.Configuration&nbsp;c)</code>&nbsp;</td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getFamilyDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getFamilyDirs</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
              org.apache.hadoop.fs.Path&nbsp;regionDir)</code>
 <div class="block">Given a particular region dir, return all the familydirs inside it</div>
 </td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i19" class="rowColor">
 <td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html" title="class in org.apache.hadoop.hbase.util">FSUtils</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getInstance-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-">getInstance</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
            org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i20" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getLocalTableDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getLocalTableDirs</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                  org.apache.hadoop.fs.Path&nbsp;rootdir)</code>&nbsp;</td>
 </tr>
-<tr id="i19" class="rowColor">
+<tr id="i21" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getReferenceFilePaths-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getReferenceFilePaths</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                      org.apache.hadoop.fs.Path&nbsp;familyDir)</code>&nbsp;</td>
 </tr>
-<tr id="i20" class="altColor">
+<tr id="i22" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true" title="class or interface in java.lang">Float</a>&gt;&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDegreeLocalityMappingFromFS-org.apache.hadoop.conf.Configuration-">getRegionDegreeLocalityMappingFromFS</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
 <div class="block">This function is to scan the root path of the file system to get the
@@ -418,7 +437,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  one block of that region.</div>
 </td>
 </tr>
-<tr id="i21" class="rowColor">
+<tr id="i23" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true" title="class or interface in java.lang">Float</a>&gt;&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDegreeLocalityMappingFromFS-org.apache.hadoop.conf.Configuration-java.lang.String-int-">getRegionDegreeLocalityMappingFromFS</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;desiredTable,
@@ -428,12 +447,12 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  one block of that region.</div>
 </td>
 </tr>
-<tr id="i22" class="altColor">
+<tr id="i24" class="altColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDir-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.client.RegionInfo-">getRegionDir</a></span>(org.apache.hadoop.fs.Path&nbsp;tableDir,
             <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)</code>&nbsp;</td>
 </tr>
-<tr id="i23" class="rowColor">
+<tr id="i25" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getRegionDirs</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
              org.apache.hadoop.fs.Path&nbsp;tableDir)</code>
@@ -441,7 +460,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  .tableinfo</div>
 </td>
 </tr>
-<tr id="i24" class="altColor">
+<tr id="i26" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionLocalityMappingFromFS-org.apache.hadoop.conf.Configuration-java.lang.String-int-java.util.Map-java.util.Map-">getRegionLocalityMappingFromFS</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;desiredTable,
@@ -454,17 +473,17 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  one block of that region.</div>
 </td>
 </tr>
-<tr id="i25" class="rowColor">
+<tr id="i27" class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getRegionReferenceFileCount-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getRegionReferenceFileCount</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                            org.apache.hadoop.fs.Path&nbsp;p)</code>&nbsp;</td>
 </tr>
-<tr id="i26" class="altColor">
+<tr id="i28" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableDirs-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableDirs</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
             org.apache.hadoop.fs.Path&nbsp;rootdir)</code>&nbsp;</td>
 </tr>
-<tr id="i27" class="rowColor">
+<tr id="i29" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableFragmentation-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableFragmentation</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                      org.apache.hadoop.fs.Path&nbsp;hbaseRootDir)</code>
@@ -472,14 +491,14 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  have more than one file in them.</div>
 </td>
 </tr>
-<tr id="i28" class="altColor">
+<tr id="i30" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableFragmentation-org.apache.hadoop.hbase.master.HMaster-">getTableFragmentation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)</code>
 <div class="block">Runs through the HBase rootdir and checks how many stores for each table
  have more than one file in them.</div>
 </td>
 </tr>
-<tr id="i29" class="rowColor">
+<tr id="i31" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getTableStoreFilePathMap</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                         org.apache.hadoop.fs.Path&nbsp;hbaseRootDir)</code>
@@ -487,7 +506,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  table StoreFile names to the full Path.</div>
 </td>
 </tr>
-<tr id="i30" class="altColor">
+<tr id="i32" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.PathFilter-java.util.concurrent.ExecutorService-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-">getTableStoreFilePathMap</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                         org.apache.hadoop.fs.Path&nbsp;hbaseRootDir,
@@ -498,7 +517,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  table StoreFile names to the full Path.</div>
 </td>
 </tr>
-<tr id="i31" class="rowColor">
+<tr id="i33" class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-java.util.Map-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-">getTableStoreFilePathMap</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;map,
                         org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -508,7 +527,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  table StoreFile names to the full Path.</div>
 </td>
 </tr>
-<tr id="i32" class="altColor">
+<tr id="i34" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTableStoreFilePathMap-java.util.Map-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.TableName-org.apache.hadoop.fs.PathFilter-java.util.concurrent.ExecutorService-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-">getTableStoreFilePathMap</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;resultMap,
                         org.apache.hadoop.fs.FileSystem&nbsp;fs,
@@ -521,38 +540,38 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  table StoreFile names to the full Path.</div>
 </td>
 </tr>
-<tr id="i33" class="rowColor">
+<tr id="i35" class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getTotalTableFragmentation-org.apache.hadoop.hbase.master.HMaster-">getTotalTableFragmentation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)</code>
 <div class="block">Returns the total overall fragmentation percentage.</div>
 </td>
 </tr>
-<tr id="i34" class="altColor">
+<tr id="i36" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#getVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">getVersion</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
           org.apache.hadoop.fs.Path&nbsp;rootdir)</code>
 <div class="block">Verifies current version of file system</div>
 </td>
 </tr>
-<tr id="i35" class="rowColor">
+<tr id="i37" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isDistributedFileSystem-org.apache.hadoop.fs.FileSystem-">isDistributedFileSystem</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs)</code>&nbsp;</td>
 </tr>
-<tr id="i36" class="altColor">
+<tr id="i38" class="altColor">
 <td class="colFirst"><code>private static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isInSafeMode-org.apache.hadoop.hdfs.DistributedFileSystem-">isInSafeMode</a></span>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs)</code>
 <div class="block">We use reflection because <code>DistributedFileSystem.setSafeMode(
  HdfsConstants.SafeModeAction action, boolean isChecked)</code> is not in hadoop 1.1</div>
 </td>
 </tr>
-<tr id="i37" class="rowColor">
+<tr id="i39" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#isMatchingTail-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">isMatchingTail</a></span>(org.apache.hadoop.fs.Path&nbsp;pathToSearch,
               org.apache.hadoop.fs.Path&nbsp;pathTail)</code>
 <div class="block">Compare path component of the Path URI; e.g.</div>
 </td>
 </tr>
-<tr id="i38" class="altColor">
+<tr id="i40" class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#listStatusWithStatusFilter-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.util.FileStatusFilter-">listStatusWithStatusFilter</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                           org.apache.hadoop.fs.Path&nbsp;dir,
@@ -563,20 +582,20 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  while Hadoop 2 will throw FileNotFoundException.</div>
 </td>
 </tr>
-<tr id="i39" class="rowColor">
+<tr id="i41" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#metaRegionExists-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">metaRegionExists</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                 org.apache.hadoop.fs.Path&nbsp;rootdir)</code>
 <div class="block">Checks if meta region exists</div>
 </td>
 </tr>
-<tr id="i40" class="altColor">
+<tr id="i42" class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#parseVersionFrom-byte:A-">parseVersionFrom</a></span>(byte[]&nbsp;bytes)</code>
 <div class="block">Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.</div>
 </td>
 </tr>
-<tr id="i41" class="rowColor">
+<tr id="i43" class="rowColor">
 <td class="colFirst"><code>abstract void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#recoverFileLease-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.util.CancelableProgressable-">recoverFileLease</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                 org.apache.hadoop.fs.Path&nbsp;p,
@@ -585,14 +604,14 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <div class="block">Recover file lease.</div>
 </td>
 </tr>
-<tr id="i42" class="altColor">
+<tr id="i44" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#rewriteAsPb-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.ClusterId-">rewriteAsPb</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
            org.apache.hadoop.fs.Path&nbsp;rootdir,
            org.apache.hadoop.fs.Path&nbsp;p,
            <a href="../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a>&nbsp;cid)</code>&nbsp;</td>
 </tr>
-<tr id="i43" class="rowColor">
+<tr id="i45" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setClusterId-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.ClusterId-int-">setClusterId</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
             org.apache.hadoop.fs.Path&nbsp;rootdir,
@@ -602,20 +621,20 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
  in the HBase root directory</div>
 </td>
 </tr>
-<tr id="i44" class="altColor">
+<tr id="i46" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setupShortCircuitRead-org.apache.hadoop.conf.Configuration-">setupShortCircuitRead</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
 <div class="block">Do our short circuit read setup.</div>
 </td>
 </tr>
-<tr id="i45" class="rowColor">
+<tr id="i47" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">setVersion</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
           org.apache.hadoop.fs.Path&nbsp;rootdir)</code>
 <div class="block">Sets version of file system</div>
 </td>
 </tr>
-<tr id="i46" class="altColor">
+<tr id="i48" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-int-int-">setVersion</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
           org.apache.hadoop.fs.Path&nbsp;rootdir,
@@ -624,7 +643,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <div class="block">Sets version of file system</div>
 </td>
 </tr>
-<tr id="i47" class="rowColor">
+<tr id="i49" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#setVersion-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.lang.String-int-int-">setVersion</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
           org.apache.hadoop.fs.Path&nbsp;rootdir,
@@ -634,13 +653,13 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <div class="block">Sets version of file system</div>
 </td>
 </tr>
-<tr id="i48" class="altColor">
+<tr id="i50" class="altColor">
 <td class="colFirst"><code>(package private) static byte[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#toVersionByteArray-java.lang.String-">toVersionByteArray</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;version)</code>
 <div class="block">Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.</div>
 </td>
 </tr>
-<tr id="i49" class="rowColor">
+<tr id="i51" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html#waitOnSafeMode-org.apache.hadoop.conf.Configuration-long-">waitOnSafeMode</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
               long&nbsp;wait)</code>
@@ -682,7 +701,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.103">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.105">LOG</a></pre>
 </li>
 </ul>
 <a name="THREAD_POOLSIZE">
@@ -691,7 +710,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>THREAD_POOLSIZE</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.105">THREAD_POOLSIZE</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.107">THREAD_POOLSIZE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.FSUtils.THREAD_POOLSIZE">Constant Field Values</a></dd>
@@ -704,7 +723,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_THREAD_POOLSIZE</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.106">DEFAULT_THREAD_POOLSIZE</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.108">DEFAULT_THREAD_POOLSIZE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.FSUtils.DEFAULT_THREAD_POOLSIZE">Constant Field Values</a></dd>
@@ -717,7 +736,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WINDOWS</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.110">WINDOWS</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.112">WINDOWS</a></pre>
 <div class="block">Set to true on Windows platforms</div>
 </li>
 </ul>
@@ -735,7 +754,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FSUtils</h4>
-<pre>protected&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.112">FSUtils</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.114">FSUtils</a>()</pre>
 </li>
 </ul>
 </li>
@@ -752,7 +771,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>isDistributedFileSystem</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.120">isDistributedFileSystem</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.122">isDistributedFileSystem</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -768,7 +787,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>isMatchingTail</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.138">isMatchingTail</a>(org.apache.hadoop.fs.Path&nbsp;pathToSearch,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.140">isMatchingTail</a>(org.apache.hadoop.fs.Path&nbsp;pathToSearch,
                                      org.apache.hadoop.fs.Path&nbsp;pathTail)</pre>
 <div class="block">Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the
  '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider
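(A minimal, hypothetical usage sketch of the comparison described in the Javadoc above; the example paths are the ones given in that Javadoc, and the printed result follows from it:)

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class IsMatchingTailExample {
      public static void main(String[] args) {
        // Only the path components are compared, as described above:
        // 'hdfs://a/b/c' against 'b/c' matches on the trailing 'b/c'.
        boolean sameTail = FSUtils.isMatchingTail(new Path("hdfs://a/b/c"), new Path("b/c"));
        System.out.println(sameTail);   // true, per the Javadoc above
      }
    }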
@@ -788,7 +807,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getInstance</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html" title="class in org.apache.hadoop.hbase.util">FSUtils</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.160">getInstance</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/FSUtils.html" title="class in org.apache.hadoop.hbase.util">FSUtils</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.162">getInstance</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                   org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
@@ -798,7 +817,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteRegionDir</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.180">deleteRegionDir</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.182">deleteRegionDir</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                       <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a>&nbsp;hri)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Delete the region directory if exists.</div>
@@ -819,7 +838,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>create</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.207">create</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.209">create</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                              org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                              org.apache.hadoop.fs.Path&nbsp;path,
                                                              org.apache.hadoop.fs.permission.FsPermission&nbsp;perm,
@@ -855,7 +874,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>checkFileSystemAvailable</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.247">checkFileSystemAvailable</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.249">checkFileSystemAvailable</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks to see if the specified file system is available</div>
 <dl>
@@ -872,7 +891,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>isInSafeMode</h4>
-<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.280">isInSafeMode</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs)
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.282">isInSafeMode</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">We use reflection because <code>DistributedFileSystem.setSafeMode(
  HdfsConstants.SafeModeAction action, boolean isChecked)</code> is not in hadoop 1.1</div>
@@ -892,7 +911,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>checkDfsSafeMode</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.302">checkDfsSafeMode</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.304">checkDfsSafeMode</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check whether dfs is in safemode.</div>
 <dl>
@@ -909,7 +928,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getVersion</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.324">getVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.326">getVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                 org.apache.hadoop.fs.Path&nbsp;rootdir)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                 <a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions">DeserializationException</a></pre>
@@ -932,7 +951,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>parseVersionFrom</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.367">parseVersionFrom</a>(byte[]&nbsp;bytes)
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.369">parseVersionFrom</a>(byte[]&nbsp;bytes)
                         throws <a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions">DeserializationException</a></pre>
 <div class="block">Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.</div>
 <dl>
@@ -951,7 +970,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>toVersionByteArray</h4>
-<pre>static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.387">toVersionByteArray</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;version)</pre>
+<pre>static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.389">toVersionByteArray</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;version)</pre>
 <div class="block">Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -967,7 +986,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>checkVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.403">checkVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.405">checkVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                 org.apache.hadoop.fs.Path&nbsp;rootdir,
                                 boolean&nbsp;message)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -990,7 +1009,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>checkVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.420">checkVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.422">checkVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                 org.apache.hadoop.fs.Path&nbsp;rootdir,
                                 boolean&nbsp;message,
                                 int&nbsp;wait,
@@ -1017,7 +1036,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>setVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.454">setVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.456">setVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                               org.apache.hadoop.fs.Path&nbsp;rootdir)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Sets version of file system</div>
@@ -1036,7 +1055,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>setVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.469">setVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.471">setVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                               org.apache.hadoop.fs.Path&nbsp;rootdir,
                               int&nbsp;wait,
                               int&nbsp;retries)
@@ -1059,7 +1078,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>setVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.485">setVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.487">setVersion</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                               org.apache.hadoop.fs.Path&nbsp;rootdir,
                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;version,
                               int&nbsp;wait,
@@ -1084,7 +1103,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>checkClusterIdExists</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.542">checkClusterIdExists</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.544">checkClusterIdExists</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                            org.apache.hadoop.fs.Path&nbsp;rootdir,
                                            int&nbsp;wait)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1107,7 +1126,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterId</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.571">getClusterId</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.573">getClusterId</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                      org.apache.hadoop.fs.Path&nbsp;rootdir)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns the value of the unique cluster ID stored for this HBase instance.</div>
@@ -1128,7 +1147,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>rewriteAsPb</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.617">rewriteAsPb</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.619">rewriteAsPb</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                 org.apache.hadoop.fs.Path&nbsp;rootdir,
                                 org.apache.hadoop.fs.Path&nbsp;p,
                                 <a href="../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a>&nbsp;cid)
@@ -1147,7 +1166,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>setClusterId</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.640">setClusterId</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.642">setClusterId</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                 org.apache.hadoop.fs.Path&nbsp;rootdir,
                                 <a href="../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a>&nbsp;clusterId,
                                 int&nbsp;wait)
@@ -1171,7 +1190,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>waitOnSafeMode</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.690">waitOnSafeMode</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.692">waitOnSafeMode</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                   long&nbsp;wait)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">If DFS, check safe mode and if so, wait until we clear it.</div>
@@ -1190,7 +1209,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>metaRegionExists</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.716">metaRegionExists</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.718">metaRegionExists</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                        org.apache.hadoop.fs.Path&nbsp;rootdir)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks if meta region exists</div>
@@ -1211,7 +1230,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>computeHDFSBlocksDistribution</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HDFSBlocksDistribution.html" title="class in org.apache.hadoop.hbase">HDFSBlocksDistribution</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.731">computeHDFSBlocksDistribution</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HDFSBlocksDistribution.html" title="class in org.apache.hadoop.hbase">HDFSBlocksDistribution</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.733">computeHDFSBlocksDistribution</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                    org.apache.hadoop.fs.FileStatus&nbsp;status,
                                                                    long&nbsp;start,
                                                                    long&nbsp;length)
@@ -1236,7 +1255,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>addToHDFSBlocksDistribution</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.751">addToHDFSBlocksDistribution</a>(<a href="../../../../../org/apache/hadoop/hbase/HDFSBlocksDistribution.html" title="class in org.apache.hadoop.hbase">HDFSBlocksDistribution</a>&nbsp;blocksDistribution,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.753">addToHDFSBlocksDistribution</a>(<a href="../../../../../org/apache/hadoop/hbase/HDFSBlocksDistribution.html" title="class in org.apache.hadoop.hbase">HDFSBlocksDistribution</a>&nbsp;blocksDistribution,
                                                org.apache.hadoop.fs.BlockLocation[]&nbsp;blockLocations)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Update blocksDistribution with blockLocations</div>
@@ -1255,7 +1274,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalTableFragmentation</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.770">getTotalTableFragmentation</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.772">getTotalTableFragmentation</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns the total overall fragmentation percentage. Includes hbase:meta and
  -ROOT- as well.</div>
@@ -1275,7 +1294,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableFragmentation</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.786">getTableFragmentation</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.788">getTableFragmentation</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)
                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Runs through the HBase rootdir and checks how many stores for each table
  have more than one file in them. Checks -ROOT- and hbase:meta too. The total
@@ -1296,7 +1315,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableFragmentation</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.805">getTableFragmentation</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.807">getTableFragmentation</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                         org.apache.hadoop.fs.Path&nbsp;hbaseRootDir)
                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Runs through the HBase rootdir and checks how many stores for each table
@@ -1319,7 +1338,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>recoverFileLease</h4>
-<pre>public abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.948">recoverFileLease</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.950">recoverFileLease</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                       org.apache.hadoop.fs.Path&nbsp;p,
                                       org.apache.hadoop.conf.Configuration&nbsp;conf,
                                       <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)
@@ -1342,7 +1361,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableDirs</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.951">getTableDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.953">getTableDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                            org.apache.hadoop.fs.Path&nbsp;rootdir)
                                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1357,7 +1376,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getLocalTableDirs</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.970">getLocalTableDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.972">getLocalTableDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                 org.apache.hadoop.fs.Path&nbsp;rootdir)
                                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1378,7 +1397,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionDirs</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1017">getRegionDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1019">getRegionDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                             org.apache.hadoop.fs.Path&nbsp;tableDir)
                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Given a particular table dir, return all the regiondirs inside it, excluding files such as
@@ -1400,7 +1419,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionDir</h4>
-<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1031">getRegionDir</a>(org.apache.hadoop.fs.Path&nbsp;tableDir,
+<pre>public static&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1033">getRegionDir</a>(org.apache.hadoop.fs.Path&nbsp;tableDir,
                                                      <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)</pre>
 </li>
 </ul>
@@ -1410,7 +1429,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyDirs</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1074">getFamilyDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1076">getFamilyDirs</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                             org.apache.hadoop.fs.Path&nbsp;regionDir)
                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Given a particular region dir, return all the familydirs inside it</div>
@@ -1431,7 +1450,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getReferenceFilePaths</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1085">getReferenceFilePaths</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1087">getReferenceFilePaths</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                     org.apache.hadoop.fs.Path&nbsp;familyDir)
                                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1446,7 +1465,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableStoreFilePathMap</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1177">getTableStoreFilePathMap</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;map,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1179">getTableStoreFilePathMap</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;map,
                                                                              org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                              org.apache.hadoop.fs.Path&nbsp;hbaseRootDir,
                                                                              <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
@@ -1478,7 +1497,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableStoreFilePathMap</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1205">getTableStoreFilePathMap</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;resultMap,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1207">getTableStoreFilePathMap</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;resultMap,
                                                                              org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                              org.apache.hadoop.fs.Path&nbsp;hbaseRootDir,
                                                                              <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
@@ -1519,7 +1538,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionReferenceFileCount</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1320">getRegionReferenceFileCount</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1322">getRegionReferenceFileCount</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                               org.apache.hadoop.fs.Path&nbsp;p)</pre>
 </li>
 </ul>
@@ -1529,7 +1548,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableStoreFilePathMap</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1346">getTableStoreFilePathMap</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1348">getTableStoreFilePathMap</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                              org.apache.hadoop.fs.Path&nbsp;hbaseRootDir)
                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -1557,7 +1576,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableStoreFilePathMap</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1369">getTableStoreFilePathMap</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1371">getTableStoreFilePathMap</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                              org.apache.hadoop.fs.Path&nbsp;hbaseRootDir,
                                                                              org.apache.hadoop.fs.PathFilter&nbsp;sfFilter,
                                                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;executor,
@@ -1591,7 +1610,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>filterFileStatuses</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1393">filterFileStatuses</a>(org.apache.hadoop.fs.FileStatus[]&nbsp;input,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1395">filterFileStatuses</a>(org.apache.hadoop.fs.FileStatus[]&nbsp;input,
                                                                        <a href="../../../../../org/apache/hadoop/hbase/util/FileStatusFilter.html" title="interface in org.apache.hadoop.hbase.util">FileStatusFilter</a>&nbsp;filter)</pre>
 <div class="block">Filters FileStatuses in an array and returns a list</div>
 <dl>
@@ -1609,7 +1628,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>filterFileStatuses</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1406">filterFileStatuses</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;input,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1408">filterFileStatuses</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;input,
                                                                        <a href="../../../../../org/apache/hadoop/hbase/util/FileStatusFilter.html" title="interface in org.apache.hadoop.hbase.util">FileStatusFilter</a>&nbsp;filter)</pre>
 <div class="block">Filters FileStatuses in an iterator and returns a list</div>
 <dl>
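Both filterFileStatuses overloads above (array input and iterator input) apply a FileStatusFilter and collect the survivors into a list. A short sketch of using them; it assumes FileStatusFilter exposes a single accept(FileStatus) method (which is what makes the lambda legal), and the path /hbase/archive and the class name are placeholders.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.FSUtils;
    import org.apache.hadoop.hbase.util.FileStatusFilter;

    public class FilterStatusesExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(HBaseConfiguration.create());
        FileStatus[] all = fs.listStatus(new Path("/hbase/archive"));  // placeholder path

        // Keep directories only (assumes a single accept(FileStatus) method on the filter).
        FileStatusFilter dirsOnly = status -> status.isDirectory();

        List<FileStatus> fromArray = FSUtils.filterFileStatuses(all, dirsOnly);
        List<FileStatus> fromIterator =
            FSUtils.filterFileStatuses(Arrays.asList(all).iterator(), dirsOnly);
        System.out.println(fromArray.size() + " / " + fromIterator.size() + " directories kept");
      }
    }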
@@ -1627,7 +1646,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>listStatusWithStatusFilter</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1430">listStatusWithStatusFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1432">listStatusWithStatusFilter</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                                org.apache.hadoop.fs.Path&nbsp;dir,
                                                                                <a href="../../../../../org/apache/hadoop/hbase/util/FileStatusFilter.html" title="interface in org.apache.hadoop.hbase.util">FileStatusFilter</a>&nbsp;filter)
                                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
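listStatusWithStatusFilter combines the directory listing and the filtering into one call. A hedged sketch of one way it might be used; the path is a placeholder, the lambda relies on the same functional-interface assumption as the previous snippet, and the null check is defensive because the fragment above does not show how an empty or missing directory is reported.

    import java.util.List;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class ListWithFilterExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(HBaseConfiguration.create());
        Path dir = new Path("/hbase/oldWALs");  // placeholder path

        // List the directory and keep only non-empty entries in a single call.
        List<FileStatus> nonEmpty =
            FSUtils.listStatusWithStatusFilter(fs, dir, status -> status.getLen() > 0);

        // Defensive: treat a null result the same as "nothing matched".
        System.out.println(nonEmpty == null ? 0 : nonEmpty.size());
      }
    }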
@@ -1653,7 +1672,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>checkAccess</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1468">checkAccess</a>(org.apache.hadoop.security.UserGroupInformation&nbsp;ugi,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1470">checkAccess</a>(org.apache.hadoop.security.UserGroupInformation&nbsp;ugi,
                                org.apache.hadoop.fs.FileStatus&nbsp;file,
                                org.apache.hadoop.fs.permission.FsAction&nbsp;action)
                         throws <a href="../../../../../org/apache/hadoop/hbase/security/AccessDeniedException.html" title="class in org.apache.hadoop.hbase.security">AccessDeniedException</a></pre>
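checkAccess throws rather than returns, so a call site typically wraps it in a try/catch. A small sketch; the path and class name are placeholders, and the snippet assumes it runs as a user that can at least stat the file.

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.security.AccessDeniedException;
    import org.apache.hadoop.hbase.util.FSUtils;
    import org.apache.hadoop.security.UserGroupInformation;

    public class CheckAccessExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(HBaseConfiguration.create());
        FileStatus status = fs.getFileStatus(new Path("/hbase/hbase.version"));  // placeholder

        try {
          // Throws AccessDeniedException when WRITE is not permitted for the current user.
          FSUtils.checkAccess(UserGroupInformation.getCurrentUser(), status, FsAction.WRITE);
          System.out.println("write access ok");
        } catch (AccessDeniedException e) {
          System.out.println("write access denied: " + e.getMessage());
        }
      }
    }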
@@ -1674,7 +1693,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>contains</h4>
-<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1485">contains</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;groups,
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1487">contains</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;groups,
                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;user)</pre>
 </li>
 </ul>
@@ -1684,7 +1703,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/CommonFSUtils.html"
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionDegreeLocalityMappingFromFS</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true" title="class or interface in java.lang">Float</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1507">getRegionDegreeLocalityMappingFromFS</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Float.html?is-external=true" title="class or interface in java.lang">Float</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/FSUtils.html#line.1509">getRegionDegreeLocalityMappingFromFS</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <di

<TRUNCATED>
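The listing is cut off above, mid-way through the entry for getRegionDegreeLocalityMappingFromFS(Configuration). For completeness, a hedged call-site sketch of that last signature; reading the nested map as region -> (host -> degree of locality) is an interpretation drawn from the method name, not from the truncated description.

    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class LocalityMappingExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();

        // Nested map, read here as region -> (host -> degree of locality); that reading is
        // an assumption based on the method name rather than on the truncated docs above.
        Map<String, Map<String, Float>> locality =
            FSUtils.getRegionDegreeLocalityMappingFromFS(conf);

        locality.forEach((region, perHost) ->
            perHost.forEach((host, degree) ->
                System.out.println(region + " @ " + host + " = " + degree)));
      }
    }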

[07/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
----------------------------------------------------------------------
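The diff that follows is dominated by a one-line shift of the generated source listing, but the import block also gains java.util.concurrent.Executors and org.apache.hadoop.fs.FileUtil. Purely as background on what an Executors import is conventionally used for in this neighbourhood of FSUtils, and explicitly not the code this commit adds, here is a sketch of a fixed-size pool sized from the hbase.client.localityCheck.threadPoolSize key that the class declares further down.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class LocalityPoolSketch {
      public static void main(String[] args) throws InterruptedException {
        Configuration conf = HBaseConfiguration.create();

        // Same key and default (2) that FSUtils declares for its locality-check pool.
        int poolSize = conf.getInt("hbase.client.localityCheck.threadPoolSize", 2);
        ExecutorService pool = Executors.newFixedThreadPool(poolSize);

        try {
          pool.submit(() -> System.out.println("a locality-check task would run here"));
        } finally {
          pool.shutdown();
          pool.awaitTermination(30, TimeUnit.SECONDS);
        }
      }
    }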
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileLinkFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndB

<TRUNCATED>

[19/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        

<TRUNCATED>
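
The ExportSnapshot mapper code quoted above copies each snapshot file through a fixed-size buffer, periodically reports progress, and fails the copy when the byte count does not match the source file size. Below is a minimal standalone sketch of that copy-with-progress pattern; it uses plain java.io streams and a made-up CopySketch.copyWithProgress method in place of the Hadoop FSDataInputStream/FSDataOutputStream, MapReduce Context, and counters used by the real mapper, and the buffer and report sizes are illustrative assumptions.

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    /** Illustrative sketch only; not the ExportSnapshot implementation. */
    public final class CopySketch {

      private static final int BUFFER_SIZE = 64 * 1024;       // assumed copy buffer size
      private static final long REPORT_SIZE = 1024L * 1024L;  // assumed progress interval (~1 MB)

      /** Copies in to out, printing progress and verifying the final byte count. */
      public static long copyWithProgress(InputStream in, OutputStream out, long expectedSize)
          throws IOException {
        byte[] buffer = new byte[BUFFER_SIZE];
        long totalBytesWritten = 0;
        long reportBytes = 0;
        int bytesRead;
        while ((bytesRead = in.read(buffer)) > 0) {
          out.write(buffer, 0, bytesRead);
          totalBytesWritten += bytesRead;
          reportBytes += bytesRead;
          if (reportBytes >= REPORT_SIZE) {
            // The real mapper increments a BYTES_COPIED counter and updates the task status here.
            System.out.printf("copied %d/%d (%.1f%%)%n", totalBytesWritten, expectedSize,
                (totalBytesWritten / (double) expectedSize) * 100.0);
            reportBytes = 0;
          }
        }
        // Reject the copy if the written size does not match the expected input size.
        if (totalBytesWritten != expectedSize) {
          throw new IOException("number of bytes copied not matching copied=" + totalBytesWritten
              + " expected=" + expectedSize);
        }
        return totalBytesWritten;
      }
    }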

[04/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistributi

<TRUNCATED>
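
For orientation, a minimal sketch (hypothetical, not part of this commit) of how the FSUtils helpers in the hunk above (checkFileSystemAvailable, waitOnSafeMode, checkVersion, getClusterId) fit together at startup. Only the method signatures are taken from the source shown; the class name FsBootstrapSketch, the bare main() wiring and the literal "hbase.rootdir" lookup are illustrative assumptions.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.ClusterId;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class FsBootstrapSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumes hbase.rootdir is set (e.g. hbase-site.xml on the classpath).
        Path rootdir = new Path(conf.get("hbase.rootdir"));
        FileSystem fs = rootdir.getFileSystem(conf);

        FSUtils.checkFileSystemAvailable(fs);     // no-op unless fs is a DistributedFileSystem
        FSUtils.waitOnSafeMode(conf, 10000L);     // sleep 10s between checks while the NameNode is in safe mode
        FSUtils.checkVersion(fs, rootdir, true);  // creates hbase.version on an empty rootdir, otherwise verifies it
        ClusterId id = FSUtils.getClusterId(fs, rootdir);  // may be null if hbase.id does not exist yet
        System.out.println("cluster id = " + id);
      }
    }

In HBase itself these checks are normally driven from the master's filesystem bootstrap with configured wait and retry values (see DEFAULT_VERSION_FILE_WRITE_ATTEMPTS above), not from a bare main() as sketched here.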

[05/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndB

<TRUNCATED>
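
The setVersion and setClusterId methods in the removed FSUtils source above share one pattern: write the payload to a file under the hbase temp directory, rename it into its final location (rename returns false rather than throwing on failure), and retry with a fixed sleep when anything goes wrong. Below is a minimal sketch of that write-to-temp-then-rename pattern against the generic Hadoop FileSystem API; the class name, method name, and ".tmp" directory here are illustrative only and are not part of HBase.

import java.io.IOException;
import java.io.InterruptedIOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class AtomicFsWrite {
  /**
   * Writes content to "<dir>/.tmp/<name>" first, then renames it into place so
   * readers never observe a half-written file; retries with a fixed sleep.
   */
  public static void writeAtomically(FileSystem fs, Path dir, String name,
      byte[] content, long waitMs, int retries) throws IOException {
    Path finalPath = new Path(dir, name);
    Path tempPath = new Path(new Path(dir, ".tmp"), name);
    while (true) {
      try {
        // create() overwrites by default, so a temp file left over from a
        // failed attempt is simply rewritten on the next pass.
        try (FSDataOutputStream out = fs.create(tempPath, true)) {
          out.write(content);
        }
        // rename() signals failure by returning false, not by throwing.
        if (!fs.rename(tempPath, finalPath)) {
          throw new IOException("Unable to move " + tempPath + " to " + finalPath);
        }
        return;
      } catch (IOException ioe) {
        if (retries-- <= 0) {
          throw ioe;
        }
        try {
          if (waitMs > 0) {
            Thread.sleep(waitMs);
          }
        } catch (InterruptedException ie) {
          throw (InterruptedIOException) new InterruptedIOException().initCause(ie);
        }
      }
    }
  }
}

Because create() overwrites by default, the original code deliberately skips cleaning up the temp file after a failed rename; the next retry simply rewrites it.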

[26/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 6d6caa0..6351d9e 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -212,9 +212,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index f4bff52..e15a09f 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
index 092280f..1b57de0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html#line.189">SplitLogWorker.TaskExecutor.Status</a>
+<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html#line.195">SplitLogWorker.TaskExecutor.Status</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>&gt;</pre>
 </li>
 </ul>
@@ -216,7 +216,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DONE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.190">DONE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.196">DONE</a></pre>
 </li>
 </ul>
 <a name="ERR">
@@ -225,7 +225,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>ERR</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.191">ERR</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.197">ERR</a></pre>
 </li>
 </ul>
 <a name="RESIGNED">
@@ -234,7 +234,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>RESIGNED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.192">RESIGNED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.198">RESIGNED</a></pre>
 </li>
 </ul>
 <a name="PREEMPTED">
@@ -243,7 +243,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PREEMPTED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.193">PREEMPTED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.199">PREEMPTED</a></pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.206">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.212">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -280,7 +280,7 @@ for (SplitLogWorker.TaskExecutor.Status c : SplitLogWorker.TaskExecutor.Status.v
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.206">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html#line.212">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
index ea44e76..3f277c0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.188">SplitLogWorker.TaskExecutor</a></pre>
+<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.194">SplitLogWorker.TaskExecutor</a></pre>
 <div class="block">Objects implementing this interface actually do the task that has been
  acquired by a <a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.html" title="class in org.apache.hadoop.hbase.regionserver"><code>SplitLogWorker</code></a>. Since there isn't a water-tight
  guarantee that two workers will not be executing the same task therefore it
@@ -175,7 +175,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>exec</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html#line.195">exec</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor.Status</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html#line.201">exec</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                                         <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;p)</pre>
 </li>
 </ul>
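
The TaskExecutor javadoc above describes the contract for the objects that actually perform a log-splitting task acquired by a SplitLogWorker: implement exec(String, CancelableProgressable) and return one of the Status values (DONE, ERR, RESIGNED, PREEMPTED). A hedged sketch of a trivial implementation follows; NoOpSplitTaskExecutor is an invented name used only for illustration and is not an HBase class.

import org.apache.hadoop.hbase.regionserver.SplitLogWorker;
import org.apache.hadoop.hbase.util.CancelableProgressable;

// Illustrative only: accepts every task, reports progress once, and claims success.
public class NoOpSplitTaskExecutor implements SplitLogWorker.TaskExecutor {
  @Override
  public Status exec(String name, CancelableProgressable p) {
    p.progress();       // signal liveness so the coordination layer does not preempt the task
    return Status.DONE; // other possible outcomes: ERR, RESIGNED, PREEMPTED
  }
}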

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
index 04b9cb7..ca0c4f3 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.62">SplitLogWorker</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.63">SplitLogWorker</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></pre>
 <div class="block">This worker is spawned in every regionserver, including master. The Worker waits for log
@@ -290,7 +290,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.64">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.65">LOG</a></pre>
 </li>
 </ul>
 <a name="worker">
@@ -299,7 +299,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>worker</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.66">worker</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.67">worker</a></pre>
 </li>
 </ul>
 <a name="coordination">
@@ -308,7 +308,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>coordination</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.68">coordination</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.69">coordination</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -317,7 +317,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.69">conf</a></pre>
+<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.70">conf</a></pre>
 </li>
 </ul>
 <a name="server">
@@ -326,7 +326,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>server</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.70">server</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.71">server</a></pre>
 </li>
 </ul>
 </li>
@@ -343,7 +343,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>SplitLogWorker</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.72">SplitLogWorker</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;hserver,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.73">SplitLogWorker</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;hserver,
                       org.apache.hadoop.conf.Configuration&nbsp;conf,
                       <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;server,
                       <a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html" title="interface in org.apache.hadoop.hbase.regionserver">SplitLogWorker.TaskExecutor</a>&nbsp;splitTaskExecutor)</pre>
@@ -355,7 +355,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SplitLogWorker</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.80">SplitLogWorker</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;hserver,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.81">SplitLogWorker</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;hserver,
                       org.apache.hadoop.conf.Configuration&nbsp;conf,
                       <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;server,
                       <a href="../../../../../org/apache/hadoop/hbase/regionserver/LastSequenceId.html" title="interface in org.apache.hadoop.hbase.regionserver">LastSequenceId</a>&nbsp;sequenceIdChecker,
@@ -376,7 +376,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.128">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.134">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
@@ -389,7 +389,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>stopTask</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.159">stopTask</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.165">stopTask</a>()</pre>
 <div class="block">If the worker is doing a task i.e. splitting a log file then stop the task.
  It doesn't exit the worker thread.</div>
 </li>
@@ -400,7 +400,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>start</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.167">start</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.173">start</a>()</pre>
 <div class="block">start the SplitLogWorker thread</div>
 </li>
 </ul>
@@ -410,7 +410,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.175">stop</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.181">stop</a>()</pre>
 <div class="block">stop the SplitLogWorker thread</div>
 </li>
 </ul>
@@ -420,7 +420,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getTaskReadySeq</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.203">getTaskReadySeq</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.html#line.209">getTaskReadySeq</a>()</pre>
 <div class="block">Returns the number of tasks processed by coordination.
  This method is used by tests only</div>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 4c472b8..56aff73 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -704,19 +704,19 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 2731576..23060c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index a4ab1b7..f6fc79b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -238,8 +238,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index c20ff47..034077c 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index d02c856..f6d74ff 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -137,8 +137,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
index 41ff020..0f7ba9b 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.148">ExportSnapshot.Counter</a>
+<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.156">ExportSnapshot.Counter</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a>&gt;</pre>
 </li>
 </ul>
@@ -225,7 +225,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>MISSING_FILES</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.149">MISSING_FILES</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.157">MISSING_FILES</a></pre>
 </li>
 </ul>
 <a name="FILES_COPIED">
@@ -234,7 +234,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>FILES_COPIED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.149">FILES_COPIED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.157">FILES_COPIED</a></pre>
 </li>
 </ul>
 <a name="FILES_SKIPPED">
@@ -243,7 +243,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>FILES_SKIPPED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.149">FILES_SKIPPED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.157">FILES_SKIPPED</a></pre>
 </li>
 </ul>
 <a name="COPY_FAILED">
@@ -252,7 +252,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>COPY_FAILED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.149">COPY_FAILED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.157">COPY_FAILED</a></pre>
 </li>
 </ul>
 <a name="BYTES_EXPECTED">
@@ -261,7 +261,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>BYTES_EXPECTED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.150">BYTES_EXPECTED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.158">BYTES_EXPECTED</a></pre>
 </li>
 </ul>
 <a name="BYTES_SKIPPED">
@@ -270,7 +270,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>BYTES_SKIPPED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.150">BYTES_SKIPPED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.158">BYTES_SKIPPED</a></pre>
 </li>
 </ul>
 <a name="BYTES_COPIED">
@@ -279,7 +279,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BYTES_COPIED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.150">BYTES_COPIED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.158">BYTES_COPIED</a></pre>
 </li>
 </ul>
 </li>
@@ -296,7 +296,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.148">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.156">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -316,7 +316,7 @@ for (ExportSnapshot.Counter c : ExportSnapshot.Counter.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.148">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html" title="enum in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Counter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html#line.156">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

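The ExportSnapshot.Counter hunks above only shift source-line anchors (148 -> 156 and so on), but the javadoc they carry states the standard enum contract: values() returns the constants in declaration order and valueOf() resolves an exact identifier such as FILES_COPIED. A minimal sketch of that contract, assuming the hbase-mapreduce jar is on the classpath; the constant names come from the diff above, while the class name CounterIterationSketch is purely illustrative:

// Sketch only: iterates the ExportSnapshot.Counter constants the way the javadoc describes.
import org.apache.hadoop.hbase.snapshot.ExportSnapshot;

public class CounterIterationSketch {
  public static void main(String[] args) {
    // values() yields the constants in the order they are declared.
    for (ExportSnapshot.Counter c : ExportSnapshot.Counter.values()) {
      System.out.println(c.name() + " -> ordinal " + c.ordinal());
    }
    // valueOf() requires the exact identifier used to declare the constant.
    ExportSnapshot.Counter copied = ExportSnapshot.Counter.valueOf("FILES_COPIED");
    System.out.println("Resolved: " + copied);
  }
}
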
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
index 6c7a18b..2d219f0 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.153">ExportSnapshot.ExportMapper</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.161">ExportSnapshot.ExportMapper</a>
 extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.NullWritable&gt;</pre>
 </li>
 </ul>
@@ -373,7 +373,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.155">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.163">LOG</a></pre>
 </li>
 </ul>
 <a name="REPORT_SIZE">
@@ -382,7 +382,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>REPORT_SIZE</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.156">REPORT_SIZE</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.164">REPORT_SIZE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.ExportMapper.REPORT_SIZE">Constant Field Values</a></dd>
@@ -395,7 +395,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>BUFFER_SIZE</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.157">BUFFER_SIZE</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.165">BUFFER_SIZE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.ExportMapper.BUFFER_SIZE">Constant Field Values</a></dd>
@@ -408,7 +408,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyChecksum</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.159">verifyChecksum</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.167">verifyChecksum</a></pre>
 </li>
 </ul>
 <a name="filesGroup">
@@ -417,7 +417,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>filesGroup</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.160">filesGroup</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.168">filesGroup</a></pre>
 </li>
 </ul>
 <a name="filesUser">
@@ -426,7 +426,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>filesUser</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.161">filesUser</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.169">filesUser</a></pre>
 </li>
 </ul>
 <a name="filesMode">
@@ -435,7 +435,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>filesMode</h4>
-<pre>private&nbsp;short <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.162">filesMode</a></pre>
+<pre>private&nbsp;short <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.170">filesMode</a></pre>
 </li>
 </ul>
 <a name="bufferSize">
@@ -444,7 +444,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>bufferSize</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.163">bufferSize</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.171">bufferSize</a></pre>
 </li>
 </ul>
 <a name="outputFs">
@@ -453,7 +453,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>outputFs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.165">outputFs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.173">outputFs</a></pre>
 </li>
 </ul>
 <a name="outputArchive">
@@ -462,7 +462,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>outputArchive</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.166">outputArchive</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.174">outputArchive</a></pre>
 </li>
 </ul>
 <a name="outputRoot">
@@ -471,7 +471,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>outputRoot</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.167">outputRoot</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.175">outputRoot</a></pre>
 </li>
 </ul>
 <a name="inputFs">
@@ -480,7 +480,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>inputFs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.169">inputFs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.177">inputFs</a></pre>
 </li>
 </ul>
 <a name="inputArchive">
@@ -489,7 +489,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>inputArchive</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.170">inputArchive</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.178">inputArchive</a></pre>
 </li>
 </ul>
 <a name="inputRoot">
@@ -498,7 +498,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>inputRoot</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.171">inputRoot</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.179">inputRoot</a></pre>
 </li>
 </ul>
 <a name="testing">
@@ -507,7 +507,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testing</h4>
-<pre>private static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Testing</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.173">testing</a></pre>
+<pre>private static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot.Testing</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.181">testing</a></pre>
 </li>
 </ul>
 </li>
@@ -524,7 +524,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ExportMapper</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.153">ExportMapper</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.161">ExportMapper</a>()</pre>
 </li>
 </ul>
 </li>
@@ -541,7 +541,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>setup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.176">setup</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.184">setup</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
@@ -557,7 +557,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>cleanup</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.224">cleanup</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.232">cleanup</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code>cleanup</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable,org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.NullWritable&gt;</code></dd>
@@ -570,7 +570,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>map</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.230">map</a>(org.apache.hadoop.io.BytesWritable&nbsp;key,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.238">map</a>(org.apache.hadoop.io.BytesWritable&nbsp;key,
                 org.apache.hadoop.io.NullWritable&nbsp;value,
                 org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context)
          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
@@ -590,7 +590,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>getOutputPath</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.241">getOutputPath</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo&nbsp;inputInfo)
+<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.249">getOutputPath</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo&nbsp;inputInfo)
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns the location where the inputPath will be copied.</div>
 <dl>
@@ -605,7 +605,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>injectTestFailure</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.266">injectTestFailure</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.274">injectTestFailure</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
                                org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo&nbsp;inputInfo)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Used by TestExportSnapshot to test for retries when failures happen.
@@ -622,7 +622,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>copyFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.277">copyFile</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.285">copyFile</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
                       org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo&nbsp;inputInfo,
                       org.apache.hadoop.fs.Path&nbsp;outputPath)
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -638,7 +638,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>createOutputPath</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.324">createOutputPath</a>(org.apache.hadoop.fs.Path&nbsp;path)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.332">createOutputPath</a>(org.apache.hadoop.fs.Path&nbsp;path)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create the output folder and optionally set ownership.</div>
 <dl>
@@ -653,7 +653,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>preserveAttributes</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.351">preserveAttributes</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.359">preserveAttributes</a>(org.apache.hadoop.fs.Path&nbsp;path,
                                    org.apache.hadoop.fs.FileStatus&nbsp;refStat)</pre>
 <div class="block">Try to Preserve the files attribute selected by the user copying them from the source file
  This is only required when you are exporting as a different user than "hbase" or on a system
@@ -669,7 +669,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>stringIsNotEmpty</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.390">stringIsNotEmpty</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;str)</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.398">stringIsNotEmpty</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;str)</pre>
 </li>
 </ul>
 <a name="copyData-org.apache.hadoop.mapreduce.Mapper.Context-org.apache.hadoop.fs.Path-java.io.InputStream-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FSDataOutputStream-long-">
@@ -678,7 +678,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>copyData</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.394">copyData</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.402">copyData</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
                       org.apache.hadoop.fs.Path&nbsp;inputPath,
                       <a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in,
                       org.apache.hadoop.fs.Path&nbsp;outputPath,
@@ -697,7 +697,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>openSourceFile</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FSDataInputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.456">openSourceFile</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
+<pre>private&nbsp;org.apache.hadoop.fs.FSDataInputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.464">openSourceFile</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo&nbsp;fileInfo)
                                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Try to open the "source" file.
@@ -715,7 +715,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>getSourceFileStatus</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileStatus&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.482">getSourceFileStatus</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
+<pre>private&nbsp;org.apache.hadoop.fs.FileStatus&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.490">getSourceFileStatus</a>(org.apache.hadoop.mapreduce.Mapper.Context&nbsp;context,
                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo&nbsp;fileInfo)
                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -730,7 +730,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileLink</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/FileLink.html" title="class in org.apache.hadoop.hbase.io">FileLink</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.509">getFileLink</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/FileLink.html" title="class in org.apache.hadoop.hbase.io">FileLink</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.517">getFileLink</a>(org.apache.hadoop.fs.Path&nbsp;path,
                              org.apache.hadoop.conf.Configuration&nbsp;conf)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -745,7 +745,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileChecksum</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileChecksum&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.519">getFileChecksum</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private&nbsp;org.apache.hadoop.fs.FileChecksum&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.527">getFileChecksum</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                           org.apache.hadoop.fs.Path&nbsp;path)</pre>
 </li>
 </ul>
@@ -755,7 +755,7 @@ extends org.apache.hadoop.mapreduce.Mapper&lt;org.apache.hadoop.io.BytesWritable
 <ul class="blockListLast">
 <li class="blockList">
 <h4>sameFile</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.532">sameFile</a>(org.apache.hadoop.fs.FileStatus&nbsp;inputStat,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#line.540">sameFile</a>(org.apache.hadoop.fs.FileStatus&nbsp;inputStat,
                          org.apache.hadoop.fs.FileStatus&nbsp;outputStat)</pre>
 <div class="block">Check if the two files are equal by looking at the file length,
  and at the checksum (if user has specified the verifyChecksum flag).</div>

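The ExportMapper hunks above likewise only move line anchors, but the javadoc fragments describe the copy pipeline: createOutputPath creates the destination folder and optionally sets ownership, preserveAttributes re-applies user/group/mode when exporting as a user other than "hbase", and sameFile compares file length first and then the checksum when the verifyChecksum flag is set. Below is a standalone sketch of that length-then-checksum check using plain Hadoop FileSystem calls; it is not the private ExportMapper.sameFile implementation, and the paths and flag in main() are placeholders:

// Sketch of the comparison described in the sameFile javadoc: cheap length check first,
// checksum comparison only when verification is requested and both sides can produce one.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SameFileSketch {
  static boolean sameFile(FileSystem inputFs, Path input, FileSystem outputFs, Path output,
      boolean verifyChecksum) throws IOException {
    FileStatus in = inputFs.getFileStatus(input);
    FileStatus out = outputFs.getFileStatus(output);
    if (in.getLen() != out.getLen()) {
      return false;                 // different sizes can never be the same file
    }
    if (!verifyChecksum) {
      return true;                  // a length match is enough when checksums are disabled
    }
    FileChecksum inSum = inputFs.getFileChecksum(input);
    FileChecksum outSum = outputFs.getFileChecksum(output);
    // getFileChecksum() may return null (e.g. on the local fs); this sketch treats that
    // as "cannot verify" and reports the files as different.
    return inSum != null && inSum.equals(outSum);
  }

  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    System.out.println(sameFile(fs, new Path("/tmp/a"), fs, new Path("/tmp/b"), true));
  }
}

Comparing lengths first is the cheap rejection test; the checksum comparison only matters when both filesystems can actually produce a FileChecksum for the path.
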
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
index bc62610..d72acad 100644
--- a/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
+++ b/devapidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.689">ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html#line.697">ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit</a>
 extends org.apache.hadoop.mapreduce.InputSplit
 implements org.apache.hadoop.io.Writable</pre>
 </li>
@@ -241,7 +241,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>files</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.690">files</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.698">files</a></pre>
 </li>
 </ul>
 <a name="length">
@@ -250,7 +250,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>length</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.691">length</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.699">length</a></pre>
 </li>
 </ul>
 </li>
@@ -267,7 +267,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>ExportSnapshotInputSplit</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.693">ExportSnapshotInputSplit</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.701">ExportSnapshotInputSplit</a>()</pre>
 </li>
 </ul>
 <a name="ExportSnapshotInputSplit-java.util.List-">
@@ -276,7 +276,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ExportSnapshotInputSplit</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.697">ExportSnapshotInputSplit</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;snapshotFiles)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.705">ExportSnapshotInputSplit</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;snapshotFiles)</pre>
 </li>
 </ul>
 </li>
@@ -293,7 +293,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getSplitKeys</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.706">getSplitKeys</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hadoop.io.BytesWritable,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.714">getSplitKeys</a>()</pre>
 </li>
 </ul>
 <a name="getLength--">
@@ -302,7 +302,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getLength</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.711">getLength</a>()
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.719">getLength</a>()
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
@@ -320,7 +320,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getLocations</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.716">getLocations</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.724">getLocations</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
@@ -338,7 +338,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>readFields</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.721">readFields</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;in)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.729">readFields</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;in)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -354,7 +354,7 @@ implements org.apache.hadoop.io.Writable</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.735">write</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>&nbsp;out)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html#line.743">write</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>&nbsp;out)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>


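ExportSnapshotInputSplit, whose anchors shift above, is an InputSplit that also implements Writable: per the diff it carries a list of (BytesWritable, Long) pairs plus a total length that backs getLength(), and round-trips that state through write()/readFields(). A self-contained sketch of that serialization pattern follows; it mirrors the structure shown in the diff rather than the actual private class, and PairListSplitSketch with its method names is illustrative only:

// Sketch of a Writable that serializes a list of (BytesWritable, length) pairs,
// the same shape the ExportSnapshotInputSplit diff above documents.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;

public class PairListSplitSketch implements Writable {
  private List<BytesWritable> keys = new ArrayList<>();
  private List<Long> lengths = new ArrayList<>();
  private long totalLength = 0;

  public void add(BytesWritable key, long length) {
    keys.add(key);
    lengths.add(length);
    totalLength += length;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(keys.size());
    for (int i = 0; i < keys.size(); i++) {
      keys.get(i).write(out);        // BytesWritable serializes itself
      out.writeLong(lengths.get(i));
    }
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    keys.clear();
    lengths.clear();
    totalLength = 0;
    int count = in.readInt();
    for (int i = 0; i < count; i++) {
      BytesWritable key = new BytesWritable();
      key.readFields(in);
      add(key, in.readLong());
    }
  }

  public long getLength() {
    return totalLength;             // running total, analogous to the split's length field
  }
}
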
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a na

<TRUNCATED>
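
The ExportMapper code above does the per-file copy work: it opens the source through an HFileLink/WALLink, throttles the copy via the snapshot.export.map.bandwidth.mb setting, verifies the byte count, and then tries to preserve ownership and permissions. Since ExportSnapshot extends AbstractHBaseTool and implements Tool (see the class listing later in this message), the export job can also be driven programmatically through Hadoop's ToolRunner. The sketch below is a minimal, hypothetical driver under those assumptions; the snapshot name, destination URI, mapper count, and bandwidth value are placeholders, not values taken from this commit.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
    import org.apache.hadoop.util.ToolRunner;

    public class ExportSnapshotDriver {
      public static void main(String[] args) throws Exception {
        // Client configuration pointing at the source cluster.
        Configuration conf = HBaseConfiguration.create();
        // --snapshot, --copy-to, --mappers and --bandwidth mirror the Options defined in
        // ExportSnapshot; --bandwidth feeds the snapshot.export.map.bandwidth.mb throttle
        // read in ExportMapper.copyFile above. "my_snapshot" and the hdfs:// URI are placeholders.
        int rc = ToolRunner.run(conf, new ExportSnapshot(), new String[] {
            "--snapshot", "my_snapshot",
            "--copy-to", "hdfs://backup-cluster:8020/hbase",
            "--mappers", "4",
            "--bandwidth", "50" });
        System.exit(rc);
      }
    }

On a real deployment the same options are normally passed to the tool through the hbase command-line wrapper rather than a custom driver like this.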

[18/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotInputSplit.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a na

<TRUNCATED>
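
The removed copy loop above reads fixed-size chunks, keeps a running byte count, emits a progress report once a byte threshold has accumulated, and finally verifies that the number of bytes copied matches the expected input size. Below is a minimal, dependency-free sketch of that same pattern; the ProgressReporter callback and the 1 MB REPORT_SIZE value are assumptions standing in for the MapReduce context counters and constants in the diff, not the HBase implementation itself.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/** Chunked copy that reports progress at a fixed byte threshold and
 *  verifies the copied size against the expected input size. */
public final class ChunkedCopy {

  /** Hypothetical stand-in for the MapReduce context/counter updates. */
  public interface ProgressReporter {
    void report(long copiedSoFar, long expectedTotal);
  }

  private static final int REPORT_SIZE = 1024 * 1024; // assumed 1 MB reporting threshold

  public static long copy(InputStream in, OutputStream out, long expectedSize,
      int bufferSize, ProgressReporter reporter) throws IOException {
    byte[] buffer = new byte[bufferSize];
    long total = 0;
    int sinceLastReport = 0;
    int read;
    while ((read = in.read(buffer)) > 0) {
      out.write(buffer, 0, read);
      total += read;
      sinceLastReport += read;
      if (sinceLastReport >= REPORT_SIZE) {
        reporter.report(total, expectedSize);   // flush accumulated progress
        sinceLastReport = 0;
      }
    }
    reporter.report(total, expectedSize);        // final report for the remaining tail
    if (total != expectedSize) {                 // size verification, as in the copy loop above
      throw new IOException("bytes copied=" + total + " expected=" + expectedSize);
    }
    return total;
  }
}
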

[12/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
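
The FSUtils diff below covers, among other utilities, the tail-matching walk in isMatchingTail, which compares path name components from the end of each path. As a quick illustration, here is a standalone analogue written against java.nio.file.Path rather than the Hadoop Path used in the diff; it follows the method's javadoc (a shorter relative tail such as b/c can match /a/b/c) and is an editorial sketch, not the HBase code.

import java.nio.file.Path;
import java.nio.file.Paths;

/** Standalone analogue of the tail-matching walk shown in FSUtils.isMatchingTail. */
public final class TailMatch {

  /** True if every name component of {@code tail}, compared from the end,
   *  matches the corresponding component of {@code pathToSearch}. */
  public static boolean isMatchingTail(Path pathToSearch, Path tail) {
    int searchCount = pathToSearch.getNameCount();
    int tailCount = tail.getNameCount();
    if (tailCount > searchCount) {
      return false;
    }
    for (int i = 1; i <= tailCount; i++) {
      // Walk both paths backwards, one component per iteration.
      if (!pathToSearch.getName(searchCount - i).equals(tail.getName(tailCount - i))) {
        return false;
      }
    }
    return true;
  }

  public static void main(String[] args) {
    System.out.println(isMatchingTail(Paths.get("/a/b/c"), Paths.get("b/c")));  // true
    System.out.println(isMatchingTail(Paths.get("/a/b/c"), Paths.get("x/c")));  // false
  }
}

Note that the version in the diff additionally requires the two paths to have equal depth before walking their components.
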
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.BlackListDirFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistributi

<TRUNCATED>
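
The computeHDFSBlocksDistribution() body above is cut off by the mailing-list truncation and is left as-is. As an independent illustration of the same idea, aggregating per-host byte weights from a file region's block locations using only the plain Hadoop FileSystem API (not a reconstruction of the truncated HBase code):

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BlockLocalitySketch {
      // Sum, per host, the lengths of the blocks that cover [start, start+length) of the file.
      public static Map<String, Long> hostWeights(FileSystem fs, FileStatus status,
          long start, long length) throws IOException {
        Map<String, Long> weights = new HashMap<>();
        for (BlockLocation bl : fs.getFileBlockLocations(status, start, length)) {
          for (String host : bl.getHosts()) {
            weights.merge(host, bl.getLength(), Long::sum);
          }
        }
        return weights;
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path p = new Path(args[0]);  // e.g. an HFile path passed on the command line
        FileSystem fs = p.getFileSystem(conf);
        FileStatus status = fs.getFileStatus(p);
        hostWeights(fs, status, 0, status.getLen())
            .forEach((host, bytes) -> System.out.println(host + " -> " + bytes + " bytes"));
      }
    }
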

[20/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Counter.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        LOG.error("Unable to get 

<TRUNCATED>
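
The attribute-preservation code shown in the hunk above follows a simple pattern: read the destination file's status, then apply the requested permission and owner/group, treating any failure as non-fatal. A minimal sketch of that pattern against the plain Hadoop FileSystem API follows; it is illustrative only, not the published ExportSnapshot code, and the class and method names (AttributeCopy, preserveAttributes) are invented for the example.

import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Illustrative sketch only: mirrors the non-blocking permission/owner copy above. */
public final class AttributeCopy {

  private AttributeCopy() {
  }

  /**
   * Best-effort copy of permission and owner/group from a reference FileStatus to a
   * destination path. Returns false instead of throwing, matching the pattern above
   * where attribute preservation is not treated as a blocking failure.
   */
  public static boolean preserveAttributes(FileSystem outputFs, Path path, FileStatus refStat) {
    try {
      FileStatus stat = outputFs.getFileStatus(path);
      if (refStat == null) {
        return true; // nothing to copy from
      }
      if (!stat.getPermission().equals(refStat.getPermission())) {
        outputFs.setPermission(path, refStat.getPermission());
      }
      if (!(refStat.getOwner().equals(stat.getOwner())
          && refStat.getGroup().equals(stat.getGroup()))) {
        // May fail if the user/group does not exist on the destination cluster.
        outputFs.setOwner(path, refStat.getOwner(), refStat.getGroup());
      }
      return true;
    } catch (IOException e) {
      // Non-fatal: the caller can chmod/chown later with a user known to exist.
      return false;
    }
  }
}

As in the code above, returning false rather than rethrowing lets the file copy itself succeed even when the destination cluster lacks the source user or group.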

[08/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.HFileFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndBlockWeight(hosts, le

<TRUNCATED>

[27/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index b8f6378..3e23a0c 100644
--- a/book.html
+++ b/book.html
@@ -19872,21 +19872,21 @@ creation or mutation, and <code>DELETE</code> for deletion.
 <tr>
 <td class="tableblock halign-left valign-top"><p class="tableblock"><code>/<em>table</em>/schema</code></p></td>
 <td class="tableblock halign-left valign-top"><p class="tableblock"><code>POST</code></p></td>
-<td class="tableblock halign-left valign-top"><p class="tableblock">Create a new table, or replace an existing table&#8217;s schema</p></td>
+<td class="tableblock halign-left valign-top"><p class="tableblock">Update an existing table with the provided schema fragment</p></td>
 <td class="tableblock halign-left valign-top"><div class="literal"><pre>curl -vi -X POST \
   -H "Accept: text/xml" \
   -H "Content-Type: text/xml" \
-  -d '&lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;TableSchema name="users"&gt;&lt;ColumnSchema name="cf" /&gt;&lt;/TableSchema&gt;' \
+  -d '&lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;TableSchema name="users"&gt;&lt;ColumnSchema name="cf" KEEP_DELETED_CELLS="true" /&gt;&lt;/TableSchema&gt;' \
   "http://example.com:8000/users/schema"</pre></div></td>
 </tr>
 <tr>
 <td class="tableblock halign-left valign-top"><p class="tableblock"><code>/<em>table</em>/schema</code></p></td>
 <td class="tableblock halign-left valign-top"><p class="tableblock"><code>PUT</code></p></td>
-<td class="tableblock halign-left valign-top"><p class="tableblock">Update an existing table with the provided schema fragment</p></td>
+<td class="tableblock halign-left valign-top"><p class="tableblock">Create a new table, or replace an existing table&#8217;s schema</p></td>
 <td class="tableblock halign-left valign-top"><div class="literal"><pre>curl -vi -X PUT \
   -H "Accept: text/xml" \
   -H "Content-Type: text/xml" \
-  -d '&lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;TableSchema name="users"&gt;&lt;ColumnSchema name="cf" KEEP_DELETED_CELLS="true" /&gt;&lt;/TableSchema&gt;' \
+  -d '&lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;TableSchema name="users"&gt;&lt;ColumnSchema name="cf" /&gt;&lt;/TableSchema&gt;' \
   "http://example.com:8000/users/schema"</pre></div></td>
 </tr>
 <tr>
@@ -37873,7 +37873,7 @@ The server will return cellblocks compressed using this same compressor as long
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2018-05-17 14:29:55 UTC
+Last updated 2018-05-18 14:29:51 UTC
 </div>
 </div>
 </body>

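As an aside on the /table/schema rows corrected in the hunk above: read together, PUT is the verb that creates a table or replaces its whole schema, while POST amends an existing table with a schema fragment. The sketch below simply strings the book's two curl examples together in that order; the host example.com:8000 and the "users" table are the same placeholder values the book uses, not a real endpoint.

  # 1. Create (or replace) the 'users' table with a single column family -- PUT creates/replaces
  curl -vi -X PUT \
    -H "Accept: text/xml" \
    -H "Content-Type: text/xml" \
    -d '<?xml version="1.0" encoding="UTF-8"?><TableSchema name="users"><ColumnSchema name="cf" /></TableSchema>' \
    "http://example.com:8000/users/schema"

  # 2. Amend the existing table with a schema fragment -- POST updates in place
  curl -vi -X POST \
    -H "Accept: text/xml" \
    -H "Content-Type: text/xml" \
    -d '<?xml version="1.0" encoding="UTF-8"?><TableSchema name="users"><ColumnSchema name="cf" KEEP_DELETED_CELLS="true" /></TableSchema>' \
    "http://example.com:8000/users/schema"
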
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index f3a6e17..099355e 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -306,7 +306,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index c930e42..3a8e9b9 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -85787,13 +85787,13 @@
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.conf.Configuration' import.</td>
-<td>29</td></tr>
+<td>30</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.hbase.util.CancelableProgressable' import.</td>
-<td>38</td></tr></table></div>
+<td>39</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.regionserver.SplitRequest.java">org/apache/hadoop/hbase/regionserver/SplitRequest.java</h3>
 <table border="0" class="table table-striped">
@@ -107696,85 +107696,85 @@
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>268</td></tr>
+<td>276</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>269</td></tr>
+<td>277</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>534</td></tr>
+<td>542</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>537</td></tr>
+<td>545</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>541</td></tr>
+<td>549</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>544</td></tr>
+<td>552</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'object def rcurly' have incorrect indentation level 4, expected level should be one of the following: 6, 8.</td>
-<td>592</td></tr>
+<td>600</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 44 should have line break after.</td>
-<td>762</td></tr>
+<td>770</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 45 should have line break after.</td>
-<td>765</td></tr>
+<td>773</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 34 should have line break after.</td>
-<td>768</td></tr>
+<td>776</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>796</td></tr>
+<td>804</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>797</td></tr>
+<td>805</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>MethodLength</td>
-<td>Method length is 163 lines (max allowed is 150).</td>
-<td>914</td></tr>
+<td>Method length is 166 lines (max allowed is 150).</td>
+<td>939</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>954</td></tr></table></div>
+<td>979</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.snapshot.HBaseSnapshotException.java">org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java</h3>
 <table border="0" class="table table-striped">
@@ -114872,367 +114872,367 @@
 <td>imports</td>
 <td>UnusedImports</td>
 <td>Unused import - org.apache.hadoop.hdfs.protocol.HdfsConstants.</td>
-<td>79</td></tr>
+<td>81</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>118</td></tr>
+<td>120</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>135</td></tr>
+<td>137</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>139</td></tr>
+<td>141</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>152</td></tr>
+<td>154</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>175</td></tr>
+<td>177</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>176</td></tr>
+<td>178</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>178</td></tr>
+<td>180</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>203</td></tr>
+<td>205</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>228</td></tr>
+<td>230</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>276</td></tr>
+<td>278</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>278</td></tr>
+<td>280</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child have incorrect indentation level 10, expected level should be 8.</td>
-<td>284</td></tr>
+<td>286</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>288</td></tr>
+<td>290</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>299</td></tr>
+<td>301</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>300</td></tr>
+<td>302</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>322</td></tr>
+<td>324</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>335</td></tr>
+<td>337</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>365</td></tr>
+<td>367</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>385</td></tr>
+<td>387</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>401</td></tr>
+<td>403</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>418</td></tr>
+<td>420</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>431</td></tr>
+<td>433</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>510</td></tr>
+<td>512</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>614</td></tr>
+<td>616</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>615</td></tr>
+<td>617</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>623</td></tr>
+<td>625</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>661</td></tr>
+<td>663</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>694</td></tr>
+<td>696</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>927</td></tr>
+<td>929</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>946</td></tr>
+<td>948</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>964</td></tr>
+<td>966</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>965</td></tr>
+<td>967</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 105).</td>
-<td>966</td></tr>
+<td>968</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>967</td></tr>
+<td>969</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>968</td></tr>
+<td>970</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1015</td></tr>
+<td>1017</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 103).</td>
-<td>1017</td></tr>
+<td>1019</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1072</td></tr>
+<td>1074</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>1074</td></tr>
+<td>1076</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 112).</td>
-<td>1085</td></tr>
+<td>1087</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1175</td></tr>
+<td>1177</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'final' have incorrect indentation level 2, expected level should be 4.</td>
-<td>1178</td></tr>
+<td>1180</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>1194</td></tr>
+<td>1196</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1203</td></tr>
+<td>1205</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>1208</td></tr>
+<td>1210</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 110).</td>
-<td>1221</td></tr>
+<td>1223</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>1243</td></tr>
+<td>1245</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>1269</td></tr>
+<td>1271</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1344</td></tr>
+<td>1346</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1367</td></tr>
+<td>1369</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1395</td></tr>
+<td>1397</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1408</td></tr>
+<td>1410</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>1572</td></tr>
+<td>1574</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 110).</td>
-<td>1574</td></tr>
+<td>1576</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1668</td></tr>
+<td>1670</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1686</td></tr>
+<td>1688</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1695</td></tr>
+<td>1697</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1702</td></tr>
+<td>1704</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1704</td></tr>
+<td>1706</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1708</td></tr></table></div>
+<td>1710</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.util.FSVisitor.java">org/apache/hadoop/hbase/util/FSVisitor.java</h3>
 <table border="0" class="table table-striped">
@@ -119141,43 +119141,43 @@
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>86</td></tr>
+<td>87</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>106</td></tr>
+<td>107</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try rcurly' have incorrect indentation level 6, expected level should be 4.</td>
-<td>133</td></tr>
+<td>134</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>140</td></tr>
+<td>141</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>175</td></tr>
+<td>176</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 122).</td>
-<td>296</td></tr>
+<td>297</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>384</td></tr></table></div>
+<td>385</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.util.TestFSVisitor.java">org/apache/hadoop/hbase/util/TestFSVisitor.java</h3>
 <table border="0" class="table table-striped">
@@ -123477,7 +123477,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

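Most of the checkstyle rows in the report diff above are NeedBraces findings ("'if' construct must use '{}'s."). For readers unfamiliar with that rule, here is a small generic Java illustration of the violation and of the braced form checkstyle expects; it is not code taken from TestFSUtils or FSUtils, just a sketch using common Hadoop FileSystem calls.

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class NeedBracesExample {
  // Flagged by NeedBraces: a single-statement 'if' without '{}'s.
  static void deleteIfPresentNonCompliant(FileSystem fs, Path p) throws IOException {
    if (fs.exists(p)) fs.delete(p, true);
  }

  // Compliant form: the 'if' body is wrapped in braces, as the rule requires.
  static void deleteIfPresentCompliant(FileSystem fs, Path p) throws IOException {
    if (fs.exists(p)) {
      fs.delete(p, true);
    }
  }
}
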
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index 18c6923..e0c1d19 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -375,7 +375,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index 4f78937..96dcada 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -440,7 +440,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 649810f..acc18d3 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -1105,7 +1105,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index 92e53b9..32b5660 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -313,7 +313,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index 92b8ace..f50de8e 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -794,9 +794,9 @@
 <tr class="a">
 <td>org.jruby</td>
 <td><a class="externalLink" href="https://github.com/jruby/jruby/jruby-artifacts/jruby-complete">jruby-complete</a></td>
-<td>9.1.10.0</td>
+<td>9.1.17.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.gnu.org/licenses/gpl-2.0-standalone.html">GPL 2</a>, <a class="externalLink" href="http://www.gnu.org/licenses/lgpl-2.1-standalone.html">LGPL 2.1</a>, <a class="externalLink" href="http://www.eclipse.org/legal/epl-v10.html">EPL</a></td></tr>
+<td><a class="externalLink" href="http://www.gnu.org/licenses/gpl-2.0-standalone.html">GPL-2.0</a>, <a class="externalLink" href="http://www.gnu.org/licenses/lgpl-2.1-standalone.html">LGPL-2.1</a>, <a class="externalLink" href="http://www.eclipse.org/legal/epl-v20.html">EPL-2.0</a></td></tr>
 <tr class="b">
 <td>org.jruby.jcodings</td>
 <td><a class="externalLink" href="http://nexus.sonatype.org/oss-repository-hosting.html/jcodings">jcodings</a></td>
@@ -969,7 +969,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 31231b2..a1c51ac 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3768,21 +3768,21 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Thu May 17 14:39:20 UTC 2018"</code></td>
+<td class="colLast"><code>"Fri May 18 14:39:13 UTC 2018"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"cf529f18a9959589fa635f78df4840472526ea2c"</code></td>
+<td class="colLast"><code>"c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"5e1af00298043edb8a0644db5a2c28dc"</code></td>
+<td class="colLast"><code>"b02d3f1d4a3395e22f561b8280caf974"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">
@@ -25415,104 +25415,111 @@
 <td class="colLast"><code>"snapshot.export.checksum.verify"</code></td>
 </tr>
 <tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_COPY_MANIFEST_THREADS">
+<!--   -->
+</a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_COPY_MANIFEST_THREADS">CONF_COPY_MANIFEST_THREADS</a></code></td>
+<td class="colLast"><code>"snapshot.export.copy.references.threads"</code></td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_DEST_PREFIX">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_DEST_PREFIX">CONF_DEST_PREFIX</a></code></td>
 <td class="colLast"><code>"exportsnapshot.to."</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_FILES_GROUP">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_FILES_GROUP">CONF_FILES_GROUP</a></code></td>
 <td class="colLast"><code>"snapshot.export.files.attributes.group"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_FILES_MODE">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_FILES_MODE">CONF_FILES_MODE</a></code></td>
 <td class="colLast"><code>"snapshot.export.files.attributes.mode"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_FILES_USER">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_FILES_USER">CONF_FILES_USER</a></code></td>
 <td class="colLast"><code>"snapshot.export.files.attributes.user"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_INPUT_ROOT">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_INPUT_ROOT">CONF_INPUT_ROOT</a></code></td>
 <td class="colLast"><code>"snapshot.export.input.root"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_MAP_GROUP">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_MAP_GROUP">CONF_MAP_GROUP</a></code></td>
 <td class="colLast"><code>"snapshot.export.default.map.group"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_MR_JOB_NAME">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_MR_JOB_NAME">CONF_MR_JOB_NAME</a></code></td>
 <td class="colLast"><code>"mapreduce.job.name"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_NUM_SPLITS">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_NUM_SPLITS">CONF_NUM_SPLITS</a></code></td>
 <td class="colLast"><code>"snapshot.export.format.splits"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_OUTPUT_ROOT">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_OUTPUT_ROOT">CONF_OUTPUT_ROOT</a></code></td>
 <td class="colLast"><code>"snapshot.export.output.root"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SKIP_TMP">
 <!--   -->
 </a><code>protected&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SKIP_TMP">CONF_SKIP_TMP</a></code></td>
 <td class="colLast"><code>"snapshot.export.skip.tmp"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SNAPSHOT_DIR">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SNAPSHOT_DIR">CONF_SNAPSHOT_DIR</a></code></td>
 <td class="colLast"><code>"snapshot.export.format.snapshot.dir"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SNAPSHOT_NAME">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SNAPSHOT_NAME">CONF_SNAPSHOT_NAME</a></code></td>
 <td class="colLast"><code>"snapshot.export.format.snapshot.name"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SOURCE_PREFIX">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_SOURCE_PREFIX">CONF_SOURCE_PREFIX</a></code></td>
 <td class="colLast"><code>"exportsnapshot.from."</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.MR_NUM_MAPS">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#MR_NUM_MAPS">MR_NUM_MAPS</a></code></td>
 <td class="colLast"><code>"mapreduce.job.maps"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.snapshot.ExportSnapshot.NAME">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>

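The constant-values rows above add one genuinely new ExportSnapshot entry, CONF_COPY_MANIFEST_THREADS = "snapshot.export.copy.references.threads" (the other +/- lines are only alternating row-class churn). A minimal, hypothetical usage sketch follows; it assumes the key takes an integer thread count used while the export copies snapshot reference/manifest files, which the constant name and DEFAULT_COPY_MANIFEST_THREADS suggest but the table does not state, and the value 8 is arbitrary.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class CopyManifestThreadsSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Assumption: an integer thread count for copying snapshot reference/manifest
    // files during export; 8 is an arbitrary example value.
    conf.setInt("snapshot.export.copy.references.threads", 8);
    // This Configuration would then be handed to the ExportSnapshot tool.
    System.out.println(conf.getInt("snapshot.export.copy.references.threads", 1));
  }
}
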
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index ef12cfb..aa76175 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -16297,6 +16297,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/http/HttpServer.html#CONF_CONTEXT_ATTRIBUTE">CONF_CONTEXT_ATTRIBUTE</a></span> - Static variable in class org.apache.hadoop.hbase.http.<a href="org/apache/hadoop/hbase/http/HttpServer.html" title="class in org.apache.hadoop.hbase.http">HttpServer</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_COPY_MANIFEST_THREADS">CONF_COPY_MANIFEST_THREADS</a></span> - Static variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/CompactionTool.html#CONF_DELETE_COMPACTED">CONF_DELETE_COMPACTED</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/CompactionTool.html" title="class in org.apache.hadoop.hbase.regionserver">CompactionTool</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#CONF_DEST_PREFIX">CONF_DEST_PREFIX</a></span> - Static variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
@@ -17874,6 +17876,10 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html#copyFile-org.apache.hadoop.mapreduce.Mapper.Context-org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo-org.apache.hadoop.fs.Path-">copyFile(Mapper&lt;BytesWritable, NullWritable, NullWritable, NullWritable&gt;.Context, SnapshotProtos.SnapshotFileInfo, Path)</a></span> - Method in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportMapper.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot.ExportMapper</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/FSUtils.html#copyFiles-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-java.util.concurrent.ExecutorService-java.util.List-">copyFiles(FileSystem, Path, FileSystem, Path, Configuration, ExecutorService, List&lt;Future&lt;Void&gt;&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/FSUtils.html" title="class in org.apache.hadoop.hbase.util">FSUtils</a></dt>
+<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/FSUtils.html#copyFilesParallel-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.conf.Configuration-int-">copyFilesParallel(FileSystem, Path, FileSystem, Path, Configuration, int)</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/FSUtils.html" title="class in org.apache.hadoop.hbase.util">FSUtils</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/Result.html#copyFrom-org.apache.hadoop.hbase.client.Result-">copyFrom(Result)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a></dt>
 <dd>
 <div class="block">Copy another Result into this one.</div>
@@ -22630,6 +22636,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html#DEFAULT_COPROCESSORS_ENABLED">DEFAULT_COPROCESSORS_ENABLED</a></span> - Static variable in class org.apache.hadoop.hbase.coprocessor.<a href="org/apache/hadoop/hbase/coprocessor/CoprocessorHost.html" title="class in org.apache.hadoop.hbase.coprocessor">CoprocessorHost</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#DEFAULT_COPY_MANIFEST_THREADS">DEFAULT_COPY_MANIFEST_THREADS</a></span> - Static variable in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HBaseFsck.html#DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME">DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME</a></span> - Static variable in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HBaseFsck.html#DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL">DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL</a></span> - Static variable in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></dt>
@@ -99341,6 +99349,8 @@ service.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.ImplData.html#setConfig-org.apache.hadoop.conf.Configuration-">setConfig(Configuration)</a></span> - Method in class org.apache.hadoop.hbase.tmpl.regionserver.<a href="org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.ImplData.html" title="class in org.apache.hadoop.hbase.tmpl.regionserver">BlockCacheTmpl.ImplData</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setConfigParallel-org.apache.hadoop.fs.FileSystem-java.util.List-java.util.function.BiConsumer-org.apache.hadoop.conf.Configuration-">setConfigParallel(FileSystem, List&lt;Path&gt;, BiConsumer&lt;FileSystem, Path&gt;, Configuration)</a></span> - Method in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html#setConfiguration-java.lang.String-java.lang.String-">setConfiguration(String, String)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html" title="class in org.apache.hadoop.hbase.client">ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor</a></dt>
 <dd>
 <div class="block">Setter for storing a configuration setting in <a href="org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.html#configuration"><code>ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor.configuration</code></a> map.</div>
@@ -101184,10 +101194,8 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/procedure2/Procedure.html#setOwner-org.apache.hadoop.hbase.security.User-">setOwner(User)</a></span> - Method in class org.apache.hadoop.hbase.procedure2.<a href="org/apache/hadoop/hbase/procedure2/Procedure.html" title="class in org.apache.hadoop.hbase.procedure2">Procedure</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setOwner-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-java.lang.String-java.lang.String-boolean-">setOwner(FileSystem, Path, String, String, boolean)</a></span> - Method in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
-<dd>
-<div class="block">Set path ownership.</div>
-</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setOwnerParallel-org.apache.hadoop.fs.FileSystem-java.lang.String-java.lang.String-org.apache.hadoop.conf.Configuration-java.util.List-">setOwnerParallel(FileSystem, String, String, Configuration, List&lt;Path&gt;)</a></span> - Method in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html#setOwnerString-java.lang.String-">setOwnerString(String)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder.ModifyableTableDescriptor</a></dt>
 <dd>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span></div>
@@ -101226,10 +101234,8 @@ service.</div>
 <dd>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span></div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setPermission-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-short-boolean-">setPermission(FileSystem, Path, short, boolean)</a></span> - Method in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
-<dd>
-<div class="block">Set path permission.</div>
-</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#setPermissionParallel-org.apache.hadoop.fs.FileSystem-short-java.util.List-org.apache.hadoop.conf.Configuration-">setPermissionParallel(FileSystem, short, List&lt;Path&gt;, Configuration)</a></span> - Method in class org.apache.hadoop.hbase.snapshot.<a href="org/apache/hadoop/hbase/snapshot/ExportSnapshot.html" title="class in org.apache.hadoop.hbase.snapshot">ExportSnapshot</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/BackupInfo.html#setPhase-org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase-">setPhase(BackupInfo.BackupPhase)</a></span> - Method in class org.apache.hadoop.hbase.backup.<a href="org/apache/hadoop/hbase/backup/BackupInfo.html" title="class in org.apache.hadoop.hbase.backup">BackupInfo</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/http/HttpConfig.html#setPolicy-org.apache.hadoop.hbase.http.HttpConfig.Policy-">setPolicy(HttpConfig.Policy)</a></span> - Method in class org.apache.hadoop.hbase.http.<a href="org/apache/hadoop/hbase/http/HttpConfig.html" title="class in org.apache.hadoop.hbase.http">HttpConfig</a></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index ecc4902..52c7f71 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 4b4a0fd..4e18ae6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -550,24 +550,24 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 0249a53..6dea30d 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -183,14 +183,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index ce77151..8fb27c3 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 7600159..1f90dde 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -349,8 +349,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index b8bf0a6..76f6d8f 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -294,9 +294,9 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 6d426da..7666acb 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -346,11 +346,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index a7119b7..550d779 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -208,9 +208,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 0d69462..c3fdff4 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -441,18 +441,18 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
 </ul>
 </li>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Options.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        LOG.error("Unable to get 

<TRUNCATED>
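
For readers skimming the generated ExportSnapshot source above: the core of ExportMapper.copyData() is a buffered copy loop that counts the bytes written and fails the task when that count does not match the expected input length; bandwidth throttling and MapReduce progress counters are layered on top. Below is a minimal, standalone sketch of that copy-and-verify pattern, assuming only java.io; the class and method names are illustrative and are not HBase APIs.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public final class CopyAndVerifySketch {
  // Mirrors the 64 KB BUFFER_SIZE constant used by ExportMapper above.
  private static final int BUFFER_SIZE = 64 * 1024;

  // Copies everything from in to out and throws if the number of bytes copied
  // does not match expectedSize, as copyData() does before counting the file.
  public static long copyAndVerify(InputStream in, OutputStream out, long expectedSize)
      throws IOException {
    byte[] buffer = new byte[BUFFER_SIZE];
    long totalBytesWritten = 0;
    int bytesRead;
    while ((bytesRead = in.read(buffer)) > 0) {
      out.write(buffer, 0, bytesRead);
      totalBytesWritten += bytesRead;
    }
    if (totalBytesWritten != expectedSize) {
      throw new IOException("number of bytes copied does not match: copied="
          + totalBytesWritten + " expected=" + expectedSize);
    }
    return totalBytesWritten;
  }
}

In the real mapper the input is additionally wrapped in a ThrottledInputStream when a bandwidth limit is configured, and progress is reported roughly every megabyte (REPORT_SIZE) through the task counters.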
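
The retry test hook in setup()/injectTestFailure() is also worth noting: the count of failures already injected is seeded from the task attempt id, so each re-attempt of the same task injects one fewer failure until the configured budget is spent and the copy finally succeeds, which is how TestExportSnapshot exercises retries. A hedged, standalone sketch of that attempt-based failure injection follows; the names are hypothetical, and the real code reads its settings from the job Configuration.

import java.io.IOException;

public final class FailureInjectionSketch {
  private final int failuresCountToInject;
  private int injectedFailureCount;

  // taskAttemptId is the attempt number of this task; earlier attempts are
  // assumed to have already injected that many failures.
  public FailureInjectionSketch(int failuresCountToInject, int taskAttemptId) {
    this.failuresCountToInject = failuresCountToInject;
    this.injectedFailureCount = taskAttemptId;
  }

  // Throws until the failure budget is used up, then lets the copy succeed.
  public void maybeInjectFailure(String input) throws IOException {
    if (injectedFailureCount >= failuresCountToInject) {
      return;
    }
    injectedFailureCount++;
    throw new IOException(String.format("TEST FAILURE (%d of max %d): unable to copy input=%s",
        injectedFailureCount, failuresCountToInject, input));
  }
}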

[22/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 6c464c1..845224b 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -532,14 +532,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 9ec5b35..c0e28b4 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -189,8 +189,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index f05b00d..9bc07e3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 <span class="sourceLineNo">008</span>@InterfaceAudience.Private<a name="line.8"></a>
 <span class="sourceLineNo">009</span>public class Version {<a name="line.9"></a>
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
-<span class="sourceLineNo">011</span>  public static final String revision = "cf529f18a9959589fa635f78df4840472526ea2c";<a name="line.11"></a>
+<span class="sourceLineNo">011</span>  public static final String revision = "c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Thu May 17 14:39:20 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Fri May 18 14:39:13 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "5e1af00298043edb8a0644db5a2c28dc";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "b02d3f1d4a3395e22f561b8280caf974";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
index 02dbc37..2547651 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html
@@ -26,192 +26,198 @@
 <span class="sourceLineNo">018</span> */<a name="line.18"></a>
 <span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.regionserver;<a name="line.19"></a>
 <span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import java.io.IOException;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import java.io.InterruptedIOException;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import java.net.ConnectException;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.net.SocketTimeoutException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.slf4j.Logger;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.slf4j.LoggerFactory;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FileSystem;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.Server;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.ExceptionUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.40"></a>
-<span class="sourceLineNo">041</span><a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>/**<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * This worker is spawned in every regionserver, including master. The Worker waits for log<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * running in the master and races with other workers in other serves to acquire those tasks.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * The coordination is done via coordination engine.<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * &lt;p&gt;<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * It keeps heart beating the manager by periodically moving the task from UNASSIGNED to OWNED<a name="line.51"></a>
-<span class="sourceLineNo">052</span> * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to<a name="line.53"></a>
-<span class="sourceLineNo">054</span> * RESIGNED.<a name="line.54"></a>
-<span class="sourceLineNo">055</span> * &lt;p&gt;<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * the absence of a global lock there is a unavoidable race here - a worker might have just finished<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * its task when it is stripped of its ownership. Here we rely on the idempotency of the log<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * splitting task for correctness<a name="line.59"></a>
-<span class="sourceLineNo">060</span> */<a name="line.60"></a>
-<span class="sourceLineNo">061</span>@InterfaceAudience.Private<a name="line.61"></a>
-<span class="sourceLineNo">062</span>public class SplitLogWorker implements Runnable {<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  Thread worker;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  // thread pool which executes recovery work<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private SplitLogWorkerCoordination coordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  private Configuration conf;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  private RegionServerServices server;<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      TaskExecutor splitTaskExecutor) {<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.server = server;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.conf = conf;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    coordination.init(server, conf, splitTaskExecutor, this);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public SplitLogWorker(final Server hserver, final Configuration conf,<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      final RegionServerServices server, final LastSequenceId sequenceIdChecker,<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      final WALFactory factory) {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    this(hserver, conf, server, new TaskExecutor() {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      public Status exec(String filename, CancelableProgressable p) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>        Path walDir;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>        FileSystem fs;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>        try {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>          walDir = FSUtils.getWALRootDir(conf);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>          fs = walDir.getFileSystem(conf);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        } catch (IOException e) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>          LOG.warn("could not find root dir or fs", e);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>          return Status.RESIGNED;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>        }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>        // TODO have to correctly figure out when log splitting has been<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        // interrupted or has encountered a transient error and when it has<a name="line.96"></a>
-<span class="sourceLineNo">097</span>        // encountered a bad non-retry-able persistent error.<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        try {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>          if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)),<a name="line.99"></a>
-<span class="sourceLineNo">100</span>            fs, conf, p, sequenceIdChecker,<a name="line.100"></a>
-<span class="sourceLineNo">101</span>              server.getCoordinatedStateManager().getSplitLogWorkerCoordination(), factory)) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>            return Status.PREEMPTED;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          }<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        } catch (InterruptedIOException iioe) {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>          return Status.RESIGNED;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>        } catch (IOException e) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          Throwable cause = e.getCause();<a name="line.108"></a>
-<span class="sourceLineNo">109</span>          if (e instanceof RetriesExhaustedException &amp;&amp; (cause instanceof NotServingRegionException<a name="line.109"></a>
-<span class="sourceLineNo">110</span>                  || cause instanceof ConnectException<a name="line.110"></a>
-<span class="sourceLineNo">111</span>                  || cause instanceof SocketTimeoutException)) {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>            LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "<a name="line.112"></a>
-<span class="sourceLineNo">113</span>                + "resigning", e);<a name="line.113"></a>
-<span class="sourceLineNo">114</span>            return Status.RESIGNED;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          } else if (cause instanceof InterruptedException) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>            LOG.warn("log splitting of " + filename + " interrupted, resigning", e);<a name="line.116"></a>
-<span class="sourceLineNo">117</span>            return Status.RESIGNED;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>          }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>          LOG.warn("log splitting of " + filename + " failed, returning error", e);<a name="line.119"></a>
-<span class="sourceLineNo">120</span>          return Status.ERR;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>        }<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        return Status.DONE;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      }<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    });<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  @Override<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public void run() {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    try {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      LOG.info("SplitLogWorker " + server.getServerName() + " starting");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      coordination.registerListener();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      // wait for Coordination Engine is ready<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      boolean res = false;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      while (!res &amp;&amp; !coordination.isStop()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>        res = coordination.isReady();<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      }<a name="line.136"></a>
-<span class="sourceLineNo">137</span>      if (!coordination.isStop()) {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        coordination.taskLoop();<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    } catch (Throwable t) {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      if (ExceptionUtil.isInterrupt(t)) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :<a name="line.142"></a>
-<span class="sourceLineNo">143</span>            " (ERROR: exitWorker is not set, exiting anyway)"));<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      } else {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>        // only a logical error can cause here. Printing it out<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        // to make debugging easier<a name="line.146"></a>
-<span class="sourceLineNo">147</span>        LOG.error("unexpected error ", t);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    } finally {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      coordination.removeListener();<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /**<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * If the worker is doing a task i.e. splitting a log file then stop the task.<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   * It doesn't exit the worker thread.<a name="line.157"></a>
-<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public void stopTask() {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    LOG.info("Sending interrupt to stop the worker thread");<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * start the SplitLogWorker thread<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public void start() {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    worker.start();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /**<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * stop the SplitLogWorker thread<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  public void stop() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    coordination.stopProcessingTasks();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    stopTask();<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>  /**<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * Objects implementing this interface actually do the task that has been<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight<a name="line.182"></a>
-<span class="sourceLineNo">183</span>   * guarantee that two workers will not be executing the same task therefore it<a name="line.183"></a>
-<span class="sourceLineNo">184</span>   * is better to have workers prepare the task and then have the<a name="line.184"></a>
-<span class="sourceLineNo">185</span>   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * SplitLogManager.TaskFinisher<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  public interface TaskExecutor {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    enum Status {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      DONE(),<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      ERR(),<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      RESIGNED(),<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      PREEMPTED()<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    Status exec(String name, CancelableProgressable p);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /**<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * Returns the number of tasks processed by coordination.<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * This method is used by tests only<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  @VisibleForTesting<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  public int getTaskReadySeq() {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    return coordination.getTaskReadySeq();<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>}<a name="line.206"></a>
+<span class="sourceLineNo">021</span>import java.io.FileNotFoundException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.IOException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.InterruptedIOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.net.ConnectException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.net.SocketTimeoutException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span><a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.slf4j.Logger;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.slf4j.LoggerFactory;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.conf.Configuration;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FileSystem;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.Path;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Server;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.ExceptionUtil;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.41"></a>
+<span class="sourceLineNo">042</span><a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>/**<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * This worker is spawned in every regionserver, including master. The Worker waits for log<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * running in the master and races with other workers in other serves to acquire those tasks.<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * The coordination is done via coordination engine.<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * &lt;p&gt;<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * It keeps heart beating the manager by periodically moving the task from UNASSIGNED to OWNED<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * RESIGNED.<a name="line.55"></a>
+<span class="sourceLineNo">056</span> * &lt;p&gt;<a name="line.56"></a>
+<span class="sourceLineNo">057</span> * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * the absence of a global lock there is a unavoidable race here - a worker might have just finished<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * its task when it is stripped of its ownership. Here we rely on the idempotency of the log<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * splitting task for correctness<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class SplitLogWorker implements Runnable {<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  Thread worker;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  // thread pool which executes recovery work<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  private SplitLogWorkerCoordination coordination;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private Configuration conf;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  private RegionServerServices server;<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      TaskExecutor splitTaskExecutor) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    this.server = server;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    this.conf = conf;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    coordination.init(server, conf, splitTaskExecutor, this);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  public SplitLogWorker(final Server hserver, final Configuration conf,<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      final RegionServerServices server, final LastSequenceId sequenceIdChecker,<a name="line.82"></a>
+<span class="sourceLineNo">083</span>      final WALFactory factory) {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    this(hserver, conf, server, new TaskExecutor() {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      @Override<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      public Status exec(String filename, CancelableProgressable p) {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>        Path walDir;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>        FileSystem fs;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        try {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>          walDir = FSUtils.getWALRootDir(conf);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>          fs = walDir.getFileSystem(conf);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        } catch (IOException e) {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>          LOG.warn("could not find root dir or fs", e);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>          return Status.RESIGNED;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>        }<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        // TODO have to correctly figure out when log splitting has been<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        // interrupted or has encountered a transient error and when it has<a name="line.97"></a>
+<span class="sourceLineNo">098</span>        // encountered a bad non-retry-able persistent error.<a name="line.98"></a>
+<span class="sourceLineNo">099</span>        try {<a name="line.99"></a>
+<span class="sourceLineNo">100</span>          if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)),<a name="line.100"></a>
+<span class="sourceLineNo">101</span>            fs, conf, p, sequenceIdChecker,<a name="line.101"></a>
+<span class="sourceLineNo">102</span>              server.getCoordinatedStateManager().getSplitLogWorkerCoordination(), factory)) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>            return Status.PREEMPTED;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          }<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        } catch (InterruptedIOException iioe) {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          return Status.RESIGNED;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>        } catch (IOException e) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>          if (e instanceof FileNotFoundException) {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>            // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.110"></a>
+<span class="sourceLineNo">111</span>            LOG.warn("WAL {} does not exist anymore", filename, e);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>            return Status.DONE;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>          }<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          Throwable cause = e.getCause();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>          if (e instanceof RetriesExhaustedException &amp;&amp; (cause instanceof NotServingRegionException<a name="line.115"></a>
+<span class="sourceLineNo">116</span>                  || cause instanceof ConnectException<a name="line.116"></a>
+<span class="sourceLineNo">117</span>                  || cause instanceof SocketTimeoutException)) {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>            LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "<a name="line.118"></a>
+<span class="sourceLineNo">119</span>                + "resigning", e);<a name="line.119"></a>
+<span class="sourceLineNo">120</span>            return Status.RESIGNED;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>          } else if (cause instanceof InterruptedException) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>            LOG.warn("log splitting of " + filename + " interrupted, resigning", e);<a name="line.122"></a>
+<span class="sourceLineNo">123</span>            return Status.RESIGNED;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>          LOG.warn("log splitting of " + filename + " failed, returning error", e);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>          return Status.ERR;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        return Status.DONE;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    });<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void run() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    try {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      LOG.info("SplitLogWorker " + server.getServerName() + " starting");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>      coordination.registerListener();<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      // wait for Coordination Engine is ready<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      boolean res = false;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      while (!res &amp;&amp; !coordination.isStop()) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        res = coordination.isReady();<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      }<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!coordination.isStop()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        coordination.taskLoop();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    } catch (Throwable t) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      if (ExceptionUtil.isInterrupt(t)) {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :<a name="line.148"></a>
+<span class="sourceLineNo">149</span>            " (ERROR: exitWorker is not set, exiting anyway)"));<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      } else {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        // only a logical error can cause here. Printing it out<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        // to make debugging easier<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        LOG.error("unexpected error ", t);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    } finally {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      coordination.removeListener();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  /**<a name="line.161"></a>
+<span class="sourceLineNo">162</span>   * If the worker is doing a task i.e. splitting a log file then stop the task.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>   * It doesn't exit the worker thread.<a name="line.163"></a>
+<span class="sourceLineNo">164</span>   */<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  public void stopTask() {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    LOG.info("Sending interrupt to stop the worker thread");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
+<span class="sourceLineNo">169</span><a name="line.169"></a>
+<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   * start the SplitLogWorker thread<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  public void start() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    worker.start();<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  /**<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * stop the SplitLogWorker thread<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   */<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  public void stop() {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    coordination.stopProcessingTasks();<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    stopTask();<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * Objects implementing this interface actually do the task that has been<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * guarantee that two workers will not be executing the same task therefore it<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * is better to have workers prepare the task and then have the<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * SplitLogManager.TaskFinisher<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  public interface TaskExecutor {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    enum Status {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      DONE(),<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      ERR(),<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      RESIGNED(),<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      PREEMPTED()<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    Status exec(String name, CancelableProgressable p);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>  /**<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * Returns the number of tasks processed by coordination.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * This method is used by tests only<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   */<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  @VisibleForTesting<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public int getTaskReadySeq() {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    return coordination.getTaskReadySeq();<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>}<a name="line.212"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
index 02dbc37..2547651 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.html
@@ -26,192 +26,198 @@
 <span class="sourceLineNo">018</span> */<a name="line.18"></a>
 <span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.regionserver;<a name="line.19"></a>
 <span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import java.io.IOException;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import java.io.InterruptedIOException;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import java.net.ConnectException;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.net.SocketTimeoutException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.slf4j.Logger;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.slf4j.LoggerFactory;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FileSystem;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.Server;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.ExceptionUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.40"></a>
-<span class="sourceLineNo">041</span><a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>/**<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * This worker is spawned in every regionserver, including master. The Worker waits for log<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * running in the master and races with other workers in other serves to acquire those tasks.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * The coordination is done via coordination engine.<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * &lt;p&gt;<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * It keeps heart beating the manager by periodically moving the task from UNASSIGNED to OWNED<a name="line.51"></a>
-<span class="sourceLineNo">052</span> * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to<a name="line.53"></a>
-<span class="sourceLineNo">054</span> * RESIGNED.<a name="line.54"></a>
-<span class="sourceLineNo">055</span> * &lt;p&gt;<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * the absence of a global lock there is a unavoidable race here - a worker might have just finished<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * its task when it is stripped of its ownership. Here we rely on the idempotency of the log<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * splitting task for correctness<a name="line.59"></a>
-<span class="sourceLineNo">060</span> */<a name="line.60"></a>
-<span class="sourceLineNo">061</span>@InterfaceAudience.Private<a name="line.61"></a>
-<span class="sourceLineNo">062</span>public class SplitLogWorker implements Runnable {<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  Thread worker;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  // thread pool which executes recovery work<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  private SplitLogWorkerCoordination coordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  private Configuration conf;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  private RegionServerServices server;<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      TaskExecutor splitTaskExecutor) {<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.server = server;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.conf = conf;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    coordination.init(server, conf, splitTaskExecutor, this);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public SplitLogWorker(final Server hserver, final Configuration conf,<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      final RegionServerServices server, final LastSequenceId sequenceIdChecker,<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      final WALFactory factory) {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    this(hserver, conf, server, new TaskExecutor() {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      public Status exec(String filename, CancelableProgressable p) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>        Path walDir;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>        FileSystem fs;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>        try {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>          walDir = FSUtils.getWALRootDir(conf);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>          fs = walDir.getFileSystem(conf);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        } catch (IOException e) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>          LOG.warn("could not find root dir or fs", e);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>          return Status.RESIGNED;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>        }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>        // TODO have to correctly figure out when log splitting has been<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        // interrupted or has encountered a transient error and when it has<a name="line.96"></a>
-<span class="sourceLineNo">097</span>        // encountered a bad non-retry-able persistent error.<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        try {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>          if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)),<a name="line.99"></a>
-<span class="sourceLineNo">100</span>            fs, conf, p, sequenceIdChecker,<a name="line.100"></a>
-<span class="sourceLineNo">101</span>              server.getCoordinatedStateManager().getSplitLogWorkerCoordination(), factory)) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>            return Status.PREEMPTED;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          }<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        } catch (InterruptedIOException iioe) {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>          return Status.RESIGNED;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>        } catch (IOException e) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          Throwable cause = e.getCause();<a name="line.108"></a>
-<span class="sourceLineNo">109</span>          if (e instanceof RetriesExhaustedException &amp;&amp; (cause instanceof NotServingRegionException<a name="line.109"></a>
-<span class="sourceLineNo">110</span>                  || cause instanceof ConnectException<a name="line.110"></a>
-<span class="sourceLineNo">111</span>                  || cause instanceof SocketTimeoutException)) {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>            LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "<a name="line.112"></a>
-<span class="sourceLineNo">113</span>                + "resigning", e);<a name="line.113"></a>
-<span class="sourceLineNo">114</span>            return Status.RESIGNED;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          } else if (cause instanceof InterruptedException) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>            LOG.warn("log splitting of " + filename + " interrupted, resigning", e);<a name="line.116"></a>
-<span class="sourceLineNo">117</span>            return Status.RESIGNED;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>          }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>          LOG.warn("log splitting of " + filename + " failed, returning error", e);<a name="line.119"></a>
-<span class="sourceLineNo">120</span>          return Status.ERR;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>        }<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        return Status.DONE;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      }<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    });<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  @Override<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  public void run() {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    try {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      LOG.info("SplitLogWorker " + server.getServerName() + " starting");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      coordination.registerListener();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      // wait for Coordination Engine is ready<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      boolean res = false;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      while (!res &amp;&amp; !coordination.isStop()) {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>        res = coordination.isReady();<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      }<a name="line.136"></a>
-<span class="sourceLineNo">137</span>      if (!coordination.isStop()) {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        coordination.taskLoop();<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    } catch (Throwable t) {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      if (ExceptionUtil.isInterrupt(t)) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :<a name="line.142"></a>
-<span class="sourceLineNo">143</span>            " (ERROR: exitWorker is not set, exiting anyway)"));<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      } else {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>        // only a logical error can cause here. Printing it out<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        // to make debugging easier<a name="line.146"></a>
-<span class="sourceLineNo">147</span>        LOG.error("unexpected error ", t);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    } finally {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      coordination.removeListener();<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  /**<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * If the worker is doing a task i.e. splitting a log file then stop the task.<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   * It doesn't exit the worker thread.<a name="line.157"></a>
-<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  public void stopTask() {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    LOG.info("Sending interrupt to stop the worker thread");<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  /**<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * start the SplitLogWorker thread<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   */<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public void start() {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    worker.start();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  /**<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * stop the SplitLogWorker thread<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   */<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  public void stop() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    coordination.stopProcessingTasks();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    stopTask();<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>  /**<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   * Objects implementing this interface actually do the task that has been<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight<a name="line.182"></a>
-<span class="sourceLineNo">183</span>   * guarantee that two workers will not be executing the same task therefore it<a name="line.183"></a>
-<span class="sourceLineNo">184</span>   * is better to have workers prepare the task and then have the<a name="line.184"></a>
-<span class="sourceLineNo">185</span>   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * SplitLogManager.TaskFinisher<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   */<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  public interface TaskExecutor {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    enum Status {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      DONE(),<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      ERR(),<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      RESIGNED(),<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      PREEMPTED()<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    Status exec(String name, CancelableProgressable p);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  /**<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * Returns the number of tasks processed by coordination.<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * This method is used by tests only<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  @VisibleForTesting<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  public int getTaskReadySeq() {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    return coordination.getTaskReadySeq();<a name="line.204"></a>
-<span class="sourceLineNo">205</span>  }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>}<a name="line.206"></a>
+<span class="sourceLineNo">021</span>import java.io.FileNotFoundException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.IOException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.InterruptedIOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.net.ConnectException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.net.SocketTimeoutException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span><a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.slf4j.Logger;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.slf4j.LoggerFactory;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.conf.Configuration;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.FileSystem;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.fs.Path;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Server;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.ExceptionUtil;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.41"></a>
+<span class="sourceLineNo">042</span><a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>/**<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * This worker is spawned in every regionserver, including master. The Worker waits for log<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * splitting tasks to be put up by the {@link org.apache.hadoop.hbase.master.SplitLogManager}<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * running in the master and races with other workers in other serves to acquire those tasks.<a name="line.48"></a>
+<span class="sourceLineNo">049</span> * The coordination is done via coordination engine.<a name="line.49"></a>
+<span class="sourceLineNo">050</span> * &lt;p&gt;<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * If a worker has successfully moved the task from state UNASSIGNED to OWNED then it owns the task.<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * It keeps heart beating the manager by periodically moving the task from UNASSIGNED to OWNED<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * state. On success it moves the task to TASK_DONE. On unrecoverable error it moves task state to<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * ERR. If it cannot continue but wants the master to retry the task then it moves the task state to<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * RESIGNED.<a name="line.55"></a>
+<span class="sourceLineNo">056</span> * &lt;p&gt;<a name="line.56"></a>
+<span class="sourceLineNo">057</span> * The manager can take a task away from a worker by moving the task from OWNED to UNASSIGNED. In<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * the absence of a global lock there is a unavoidable race here - a worker might have just finished<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * its task when it is stripped of its ownership. Here we rely on the idempotency of the log<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * splitting task for correctness<a name="line.60"></a>
+<span class="sourceLineNo">061</span> */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>@InterfaceAudience.Private<a name="line.62"></a>
+<span class="sourceLineNo">063</span>public class SplitLogWorker implements Runnable {<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class);<a name="line.65"></a>
+<span class="sourceLineNo">066</span><a name="line.66"></a>
+<span class="sourceLineNo">067</span>  Thread worker;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  // thread pool which executes recovery work<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  private SplitLogWorkerCoordination coordination;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private Configuration conf;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  private RegionServerServices server;<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  public SplitLogWorker(Server hserver, Configuration conf, RegionServerServices server,<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      TaskExecutor splitTaskExecutor) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    this.server = server;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    this.conf = conf;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    this.coordination = hserver.getCoordinatedStateManager().getSplitLogWorkerCoordination();<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    coordination.init(server, conf, splitTaskExecutor, this);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  public SplitLogWorker(final Server hserver, final Configuration conf,<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      final RegionServerServices server, final LastSequenceId sequenceIdChecker,<a name="line.82"></a>
+<span class="sourceLineNo">083</span>      final WALFactory factory) {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    this(hserver, conf, server, new TaskExecutor() {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      @Override<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      public Status exec(String filename, CancelableProgressable p) {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>        Path walDir;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>        FileSystem fs;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        try {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>          walDir = FSUtils.getWALRootDir(conf);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>          fs = walDir.getFileSystem(conf);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        } catch (IOException e) {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>          LOG.warn("could not find root dir or fs", e);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>          return Status.RESIGNED;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>        }<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        // TODO have to correctly figure out when log splitting has been<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        // interrupted or has encountered a transient error and when it has<a name="line.97"></a>
+<span class="sourceLineNo">098</span>        // encountered a bad non-retry-able persistent error.<a name="line.98"></a>
+<span class="sourceLineNo">099</span>        try {<a name="line.99"></a>
+<span class="sourceLineNo">100</span>          if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)),<a name="line.100"></a>
+<span class="sourceLineNo">101</span>            fs, conf, p, sequenceIdChecker,<a name="line.101"></a>
+<span class="sourceLineNo">102</span>              server.getCoordinatedStateManager().getSplitLogWorkerCoordination(), factory)) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>            return Status.PREEMPTED;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          }<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        } catch (InterruptedIOException iioe) {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          return Status.RESIGNED;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>        } catch (IOException e) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>          if (e instanceof FileNotFoundException) {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>            // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.110"></a>
+<span class="sourceLineNo">111</span>            LOG.warn("WAL {} does not exist anymore", filename, e);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>            return Status.DONE;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>          }<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          Throwable cause = e.getCause();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>          if (e instanceof RetriesExhaustedException &amp;&amp; (cause instanceof NotServingRegionException<a name="line.115"></a>
+<span class="sourceLineNo">116</span>                  || cause instanceof ConnectException<a name="line.116"></a>
+<span class="sourceLineNo">117</span>                  || cause instanceof SocketTimeoutException)) {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>            LOG.warn("log replaying of " + filename + " can't connect to the target regionserver, "<a name="line.118"></a>
+<span class="sourceLineNo">119</span>                + "resigning", e);<a name="line.119"></a>
+<span class="sourceLineNo">120</span>            return Status.RESIGNED;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>          } else if (cause instanceof InterruptedException) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>            LOG.warn("log splitting of " + filename + " interrupted, resigning", e);<a name="line.122"></a>
+<span class="sourceLineNo">123</span>            return Status.RESIGNED;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>          LOG.warn("log splitting of " + filename + " failed, returning error", e);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>          return Status.ERR;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        return Status.DONE;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    });<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void run() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    try {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      LOG.info("SplitLogWorker " + server.getServerName() + " starting");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>      coordination.registerListener();<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      // wait for Coordination Engine is ready<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      boolean res = false;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      while (!res &amp;&amp; !coordination.isStop()) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        res = coordination.isReady();<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      }<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!coordination.isStop()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        coordination.taskLoop();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    } catch (Throwable t) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      if (ExceptionUtil.isInterrupt(t)) {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        LOG.info("SplitLogWorker interrupted. Exiting. " + (coordination.isStop() ? "" :<a name="line.148"></a>
+<span class="sourceLineNo">149</span>            " (ERROR: exitWorker is not set, exiting anyway)"));<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      } else {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        // only a logical error can cause here. Printing it out<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        // to make debugging easier<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        LOG.error("unexpected error ", t);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    } finally {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      coordination.removeListener();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      LOG.info("SplitLogWorker " + server.getServerName() + " exiting");<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  /**<a name="line.161"></a>
+<span class="sourceLineNo">162</span>   * If the worker is doing a task i.e. splitting a log file then stop the task.<a name="line.162"></a>
+<span class="sourceLineNo">163</span>   * It doesn't exit the worker thread.<a name="line.163"></a>
+<span class="sourceLineNo">164</span>   */<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  public void stopTask() {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    LOG.info("Sending interrupt to stop the worker thread");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    worker.interrupt(); // TODO interrupt often gets swallowed, do what else?<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
+<span class="sourceLineNo">169</span><a name="line.169"></a>
+<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   * start the SplitLogWorker thread<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  public void start() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    worker = new Thread(null, this, "SplitLogWorker-" + server.getServerName().toShortString());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    worker.start();<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  /**<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * stop the SplitLogWorker thread<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   */<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  public void stop() {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    coordination.stopProcessingTasks();<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    stopTask();<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * Objects implementing this interface actually do the task that has been<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * acquired by a {@link SplitLogWorker}. Since there isn't a water-tight<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * guarantee that two workers will not be executing the same task therefore it<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   * is better to have workers prepare the task and then have the<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * {@link org.apache.hadoop.hbase.master.SplitLogManager} commit the work in<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * SplitLogManager.TaskFinisher<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  public interface TaskExecutor {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    enum Status {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      DONE(),<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      ERR(),<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      RESIGNED(),<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      PREEMPTED()<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    Status exec(String name, CancelableProgressable p);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>  /**<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * Returns the number of tasks processed by coordination.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * This method is used by tests only<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   */<a name="line.207"></a>
+<span class="sourceLineNo">208</span>  @VisibleForTesting<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public int getTaskReadySeq() {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    return coordination.getTaskReadySeq();<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>}<a name="line.212"></a>
 
 
 


[28/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
index 6ab40ed..b77fb8a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        LOG.error("Unable to get the status for source file=" + fileInfo.toString(), e);

<TRUNCATED>

[10/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.FamilyDirFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndB

<TRUNCATED>
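
For anyone skimming this hunk rather than the rendered page, here is a minimal, illustrative sketch of how the
FSUtils version-file and cluster-ID helpers shown above fit together at startup. It only uses the public static
signatures visible in the diff (waitOnSafeMode, checkVersion, checkClusterIdExists, setClusterId, getClusterId,
getRootDir); the Configuration wiring, the class name FsBootstrapSketch, and the literal wait/retry values are
assumptions for illustration only, not part of this commit.

    import java.util.UUID;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.ClusterId;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class FsBootstrapSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();   // assumed: hbase.rootdir is already set in this conf
        Path rootdir = FSUtils.getRootDir(conf);    // inherited from CommonFSUtils, as used in deleteRegionDir()
        FileSystem fs = rootdir.getFileSystem(conf);

        // Block until HDFS leaves safe mode, polling every 10s (wait value is illustrative).
        FSUtils.waitOnSafeMode(conf, 10 * 1000);

        // Verify hbase.version; on an empty rootdir this writes a fresh file (HBASE-1195),
        // on a stale layout it throws FileSystemVersionException as shown in checkVersion().
        FSUtils.checkVersion(fs, rootdir, true);

        // Create hbase.id once, then read it back; the 100ms retry interval is illustrative.
        if (!FSUtils.checkClusterIdExists(fs, rootdir, 100)) {
          FSUtils.setClusterId(fs, rootdir, new ClusterId(UUID.randomUUID().toString()), 100);
        }
        ClusterId id = FSUtils.getClusterId(fs, rootdir);
        System.out.println("cluster id = " + id);
      }
    }

The reason both setVersion() and setClusterId() take a wait interval is the retry-and-rename pattern visible in the
hunk: each file is first written under HConstants.HBASE_TEMP_DIRECTORY and then renamed into place, with the write
retried after the wait if the rename or create fails.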

[06/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistr

<TRUNCATED>

[14/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.Testing.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException e) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        LOG.error("Unable to get 

<TRUNCATED>
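
The core of ExportMapper.copyFile()/copyData() shown above is a buffered copy loop with optional
bandwidth throttling and a final length check. The following standalone sketch condenses that flow;
the class and method names are illustrative only, it reuses the ThrottledInputStream wrapping that
appears in the source, and it omits the MapReduce counters, the skip-if-same-file check, and the
attribute preservation.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;

/** Minimal sketch of the copyFile()/copyData() flow, without the MapReduce context. */
public final class ThrottledCopySketch {
  public static void copy(FileSystem inputFs, Path src, FileSystem outputFs, Path dst,
      int bandwidthMB, int bufferSize) throws IOException {
    long expected = inputFs.getFileStatus(src).getLen();
    InputStream in = inputFs.open(src);
    if (bandwidthMB != Integer.MAX_VALUE) {
      // Same wrapping as the mapper: buffer first, then throttle to bandwidthMB MB/s.
      in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);
    }
    try (FSDataOutputStream out = outputFs.create(dst, true)) {
      byte[] buffer = new byte[bufferSize];
      long written = 0;
      int n;
      while ((n = in.read(buffer)) > 0) {
        out.write(buffer, 0, n);
        written += n;
      }
      // Like the mapper, fail if the number of copied bytes does not match the source length.
      if (written != expected) {
        throw new IOException("copied=" + written + " expected=" + expected + " for file=" + src);
      }
    } finally {
      in.close();
    }
  }
}

In the real mapper the copy additionally skips destination files that already match the source by
name and length (and optionally checksum), and failed copies are retried through MapReduce task
re-execution, which is what injectTestFailure() above exercises in TestExportSnapshot.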

[17/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
index 6ab40ed..b77fb8a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.ExportSnapshotInputFormat.ExportSnapshotRecordReader.html
@@ -37,1086 +37,1114 @@
 <span class="sourceLineNo">029</span>import java.util.Comparator;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.LinkedList;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileUtil;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HConstants;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.io.BytesWritable;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.io.IOUtils;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.io.NullWritable;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.io.Writable;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.mapreduce.Job;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.util.StringUtils;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.util.Tool;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.slf4j.Logger;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.slf4j.LoggerFactory;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>/**<a name="line.80"></a>
-<span class="sourceLineNo">081</span> * Export the specified snapshot to a given FileSystem.<a name="line.81"></a>
-<span class="sourceLineNo">082</span> *<a name="line.82"></a>
-<span class="sourceLineNo">083</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.83"></a>
-<span class="sourceLineNo">084</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.84"></a>
-<span class="sourceLineNo">085</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.85"></a>
-<span class="sourceLineNo">086</span> */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>@InterfaceAudience.Public<a name="line.87"></a>
-<span class="sourceLineNo">088</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public static final String NAME = "exportsnapshot";<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  static class Testing {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    int failuresCountToInject = 0;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    int injectedFailureCount = 0;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  // Command line options and defaults.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  static final class Options {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        "Target name for the snapshot.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        + "destination hdfs://");<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        "Do not verify checksum, use name+length only.");<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        "Change the owner of the files to the specified one.");<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "Change the group of the files to the specified one.");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        "Change the permission of the files to the specified one.");<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        "Limit bandwidth to this value in MB/second.");<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public enum Counter {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>                                                   NullWritable, NullWritable&gt; {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    private boolean verifyChecksum;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    private String filesGroup;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    private String filesUser;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    private short filesMode;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    private int bufferSize;<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    private FileSystem outputFs;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    private Path outputArchive;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    private Path outputRoot;<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>    private FileSystem inputFs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    private Path inputArchive;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    private Path inputRoot;<a name="line.171"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.ExecutionException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.concurrent.ExecutorService;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import java.util.concurrent.Executors;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import java.util.concurrent.Future;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import java.util.function.BiConsumer;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileChecksum;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileStatus;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.FileSystem;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.Path;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.HConstants;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.io.FileLink;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.io.WALLink;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.AbstractHBaseTool;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.io.BytesWritable;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.io.IOUtils;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.io.NullWritable;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.io.Writable;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.mapreduce.InputFormat;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.mapreduce.InputSplit;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.mapreduce.Job;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.mapreduce.JobContext;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.mapreduce.Mapper;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.mapreduce.RecordReader;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.mapreduce.TaskAttemptContext;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.mapreduce.security.TokenCache;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.util.StringUtils;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.util.Tool;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.slf4j.Logger;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.slf4j.LoggerFactory;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>/**<a name="line.84"></a>
+<span class="sourceLineNo">085</span> * Export the specified snapshot to a given FileSystem.<a name="line.85"></a>
+<span class="sourceLineNo">086</span> *<a name="line.86"></a>
+<span class="sourceLineNo">087</span> * The .snapshot/name folder is copied to the destination cluster<a name="line.87"></a>
+<span class="sourceLineNo">088</span> * and then all the hfiles/wals are copied using a Map-Reduce Job in the .archive/ location.<a name="line.88"></a>
+<span class="sourceLineNo">089</span> * When everything is done, the second cluster can restore the snapshot.<a name="line.89"></a>
+<span class="sourceLineNo">090</span> */<a name="line.90"></a>
+<span class="sourceLineNo">091</span>@InterfaceAudience.Public<a name="line.91"></a>
+<span class="sourceLineNo">092</span>public class ExportSnapshot extends AbstractHBaseTool implements Tool {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static final String NAME = "exportsnapshot";<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  /** Configuration prefix for overrides for the source filesystem */<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public static final String CONF_SOURCE_PREFIX = NAME + ".from.";<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  /** Configuration prefix for overrides for the destination filesystem */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public static final String CONF_DEST_PREFIX = NAME + ".to.";<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class);<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private static final String MR_NUM_MAPS = "mapreduce.job.maps";<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits";<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final String CONF_SNAPSHOT_NAME = "snapshot.export.format.snapshot.name";<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final String CONF_SNAPSHOT_DIR = "snapshot.export.format.snapshot.dir";<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final String CONF_FILES_USER = "snapshot.export.files.attributes.user";<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final String CONF_FILES_GROUP = "snapshot.export.files.attributes.group";<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String CONF_FILES_MODE = "snapshot.export.files.attributes.mode";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final String CONF_CHECKSUM_VERIFY = "snapshot.export.checksum.verify";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  private static final String CONF_OUTPUT_ROOT = "snapshot.export.output.root";<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  private static final String CONF_INPUT_ROOT = "snapshot.export.input.root";<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  private static final String CONF_BUFFER_SIZE = "snapshot.export.buffer.size";<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private static final String CONF_MAP_GROUP = "snapshot.export.default.map.group";<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  private static final String CONF_COPY_MANIFEST_THREADS =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      "snapshot.export.copy.references.threads";<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private static final int DEFAULT_COPY_MANIFEST_THREADS =<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      Runtime.getRuntime().availableProcessors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>  static class Testing {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    static final String CONF_TEST_FAILURE_COUNT = "test.snapshot.export.failure.count";<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    int failuresCountToInject = 0;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    int injectedFailureCount = 0;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  // Command line options and defaults.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static final class Options {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to restore.");<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    static final Option TARGET_NAME = new Option(null, "target", true,<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        "Target name for the snapshot.");<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    static final Option COPY_TO = new Option(null, "copy-to", true, "Remote "<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        + "destination hdfs://");<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    static final Option COPY_FROM = new Option(null, "copy-from", true,<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        "Input folder hdfs:// (default hbase.rootdir)");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        "Do not verify checksum, use name+length only.");<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "Do not verify the integrity of the exported snapshot.");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    static final Option OVERWRITE = new Option(null, "overwrite", false,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        "Rewrite the snapshot manifest if already exists.");<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    static final Option CHUSER = new Option(null, "chuser", true,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        "Change the owner of the files to the specified one.");<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    static final Option CHGROUP = new Option(null, "chgroup", true,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        "Change the group of the files to the specified one.");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    static final Option CHMOD = new Option(null, "chmod", true,<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        "Change the permission of the files to the specified one.");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    static final Option MAPPERS = new Option(null, "mappers", true,<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        "Number of mappers to use during the copy (mapreduce.job.maps).");<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    static final Option BANDWIDTH = new Option(null, "bandwidth", true,<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        "Limit bandwidth to this value in MB/second.");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  // Export Map-Reduce Counters, to keep track of the progress<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public enum Counter {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    MISSING_FILES, FILES_COPIED, FILES_SKIPPED, COPY_FAILED,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    BYTES_EXPECTED, BYTES_SKIPPED, BYTES_COPIED<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private static class ExportMapper extends Mapper&lt;BytesWritable, NullWritable,<a name="line.161"></a>
+<span class="sourceLineNo">162</span>                                                   NullWritable, NullWritable&gt; {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    final static int REPORT_SIZE = 1 * 1024 * 1024;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    final static int BUFFER_SIZE = 64 * 1024;<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    private boolean verifyChecksum;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    private String filesGroup;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    private String filesUser;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    private short filesMode;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    private int bufferSize;<a name="line.171"></a>
 <span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>    private static Testing testing = new Testing();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public void setup(Context context) throws IOException {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      Configuration conf = context.getConfiguration();<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.188"></a>
+<span class="sourceLineNo">173</span>    private FileSystem outputFs;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    private Path outputArchive;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    private Path outputRoot;<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>    private FileSystem inputFs;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    private Path inputArchive;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    private Path inputRoot;<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private static Testing testing = new Testing();<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>    @Override<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    public void setup(Context context) throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      Configuration conf = context.getConfiguration();<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>      Configuration srcConf = HBaseConfiguration.createClusterConf(conf, null, CONF_SOURCE_PREFIX);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);<a name="line.188"></a>
 <span class="sourceLineNo">189</span><a name="line.189"></a>
-<span class="sourceLineNo">190</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>      try {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      } catch (IOException e) {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      }<a name="line.198"></a>
-<span class="sourceLineNo">199</span><a name="line.199"></a>
-<span class="sourceLineNo">200</span>      try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      } catch (IOException e) {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span><a name="line.206"></a>
-<span class="sourceLineNo">207</span>      // Use the default block size of the outputFs if bigger<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>      for (Counter c : Counter.values()) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        context.getCounter(c).increment(0);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        // task.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    protected void cleanup(Context context) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      IOUtils.closeStream(inputFs);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      IOUtils.closeStream(outputFs);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    @Override<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        throws InterruptedException, IOException {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>      copyFile(context, inputInfo, outputPath);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>    /**<a name="line.238"></a>
-<span class="sourceLineNo">239</span>     * Returns the location where the inputPath will be copied.<a name="line.239"></a>
-<span class="sourceLineNo">240</span>     */<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Path path = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      switch (inputInfo.getType()) {<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        case HFILE:<a name="line.244"></a>
-<span class="sourceLineNo">245</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          String family = inputPath.getParent().getName();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.250"></a>
-<span class="sourceLineNo">251</span>              new Path(region, new Path(family, hfile)));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          break;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        case WAL:<a name="line.253"></a>
-<span class="sourceLineNo">254</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          break;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        default:<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      return new Path(outputArchive, path);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    /**<a name="line.262"></a>
-<span class="sourceLineNo">263</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.263"></a>
-<span class="sourceLineNo">264</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.264"></a>
-<span class="sourceLineNo">265</span>     */<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        throws IOException {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      testing.injectedFailureCount++;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        final Path outputPath) throws IOException {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // Get the file information<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      if (outputFs.exists(outputPath)) {<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          return;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>        }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>      try {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>        // Ensure that the output folder is there and copy the file<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        createOutputPath(outputPath.getParent());<a name="line.303"></a>
-<span class="sourceLineNo">304</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        try {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.306"></a>
-<span class="sourceLineNo">307</span>        } finally {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          out.close();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        }<a name="line.309"></a>
-<span class="sourceLineNo">310</span><a name="line.310"></a>
-<span class="sourceLineNo">311</span>        // Try to Preserve attributes<a name="line.311"></a>
-<span class="sourceLineNo">312</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.312"></a>
-<span class="sourceLineNo">313</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        }<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      } finally {<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        in.close();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>        injectTestFailure(context, inputInfo);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    }<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>    /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>     * Create the output folder and optionally set ownership.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>     */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>        outputFs.mkdirs(path);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      } else {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        Path parent = path.getParent();<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>          createOutputPath(parent);<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        outputFs.mkdirs(path);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        if (filesUser != null || filesGroup != null) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // override the owner when non-null user/group is specified<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>        }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>        if (filesMode &gt; 0) {<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.338"></a>
+<span class="sourceLineNo">190</span>      verifyChecksum = conf.getBoolean(CONF_CHECKSUM_VERIFY, true);<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>      filesGroup = conf.get(CONF_FILES_GROUP);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      filesUser = conf.get(CONF_FILES_USER);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      filesMode = (short)conf.getInt(CONF_FILES_MODE, 0);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      outputRoot = new Path(conf.get(CONF_OUTPUT_ROOT));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      inputRoot = new Path(conf.get(CONF_INPUT_ROOT));<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span>      inputArchive = new Path(inputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      outputArchive = new Path(outputRoot, HConstants.HFILE_ARCHIVE_DIRECTORY);<a name="line.199"></a>
+<span class="sourceLineNo">200</span><a name="line.200"></a>
+<span class="sourceLineNo">201</span>      try {<a name="line.201"></a>
+<span class="sourceLineNo">202</span>        srcConf.setBoolean("fs." + inputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        inputFs = FileSystem.get(inputRoot.toUri(), srcConf);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      } catch (IOException e) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        throw new IOException("Could not get the input FileSystem with root=" + inputRoot, e);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      }<a name="line.206"></a>
+<span class="sourceLineNo">207</span><a name="line.207"></a>
+<span class="sourceLineNo">208</span>      try {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>        destConf.setBoolean("fs." + outputRoot.toUri().getScheme() + ".impl.disable.cache", true);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        outputFs = FileSystem.get(outputRoot.toUri(), destConf);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      } catch (IOException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        throw new IOException("Could not get the output FileSystem with root="+ outputRoot, e);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // Use the default block size of the outputFs if bigger<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>      for (Counter c : Counter.values()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        context.getCounter(c).increment(0);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      if (context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>        testing.failuresCountToInject = conf.getInt(Testing.CONF_TEST_FAILURE_COUNT, 0);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        // Get number of times we have already injected failure based on attempt number of this<a name="line.225"></a>
+<span class="sourceLineNo">226</span>        // task.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        testing.injectedFailureCount = context.getTaskAttemptID().getId();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>    @Override<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    protected void cleanup(Context context) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      IOUtils.closeStream(inputFs);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      IOUtils.closeStream(outputFs);<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    @Override<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    public void map(BytesWritable key, NullWritable value, Context context)<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throws InterruptedException, IOException {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      SnapshotFileInfo inputInfo = SnapshotFileInfo.parseFrom(key.copyBytes());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      Path outputPath = getOutputPath(inputInfo);<a name="line.241"></a>
+<span class="sourceLineNo">242</span><a name="line.242"></a>
+<span class="sourceLineNo">243</span>      copyFile(context, inputInfo, outputPath);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span><a name="line.245"></a>
+<span class="sourceLineNo">246</span>    /**<a name="line.246"></a>
+<span class="sourceLineNo">247</span>     * Returns the location where the inputPath will be copied.<a name="line.247"></a>
+<span class="sourceLineNo">248</span>     */<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    private Path getOutputPath(final SnapshotFileInfo inputInfo) throws IOException {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      Path path = null;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      switch (inputInfo.getType()) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        case HFILE:<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          Path inputPath = new Path(inputInfo.getHfile());<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          String family = inputPath.getParent().getName();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>          TableName table =HFileLink.getReferencedTableName(inputPath.getName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>          String region = HFileLink.getReferencedRegionName(inputPath.getName());<a name="line.256"></a>
+<span class="sourceLineNo">257</span>          String hfile = HFileLink.getReferencedHFileName(inputPath.getName());<a name="line.257"></a>
+<span class="sourceLineNo">258</span>          path = new Path(FSUtils.getTableDir(new Path("./"), table),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>              new Path(region, new Path(family, hfile)));<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          break;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        case WAL:<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          LOG.warn("snapshot does not keeps WALs: " + inputInfo);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          break;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>        default:<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          throw new IOException("Invalid File Type: " + inputInfo.getType().toString());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      }<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      return new Path(outputArchive, path);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    /**<a name="line.270"></a>
+<span class="sourceLineNo">271</span>     * Used by TestExportSnapshot to test for retries when failures happen.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>     * Failure is injected in {@link #copyFile(Context, SnapshotFileInfo, Path)}.<a name="line.272"></a>
+<span class="sourceLineNo">273</span>     */<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    private void injectTestFailure(final Context context, final SnapshotFileInfo inputInfo)<a name="line.274"></a>
+<span class="sourceLineNo">275</span>        throws IOException {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      if (!context.getConfiguration().getBoolean(Testing.CONF_TEST_FAILURE, false)) return;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      if (testing.injectedFailureCount &gt;= testing.failuresCountToInject) return;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      testing.injectedFailureCount++;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      LOG.debug("Injecting failure. Count: " + testing.injectedFailureCount);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      throw new IOException(String.format("TEST FAILURE (%d of max %d): Unable to copy input=%s",<a name="line.281"></a>
+<span class="sourceLineNo">282</span>          testing.injectedFailureCount, testing.failuresCountToInject, inputInfo));<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    private void copyFile(final Context context, final SnapshotFileInfo inputInfo,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        final Path outputPath) throws IOException {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // Get the file information<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      FileStatus inputStat = getSourceFileStatus(context, inputInfo);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Verify if the output file exists and is the same that we want to copy<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      if (outputFs.exists(outputPath)) {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        FileStatus outputStat = outputFs.getFileStatus(outputPath);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        if (outputStat != null &amp;&amp; sameFile(inputStat, outputStat)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>          LOG.info("Skip copy " + inputStat.getPath() + " to " + outputPath + ", same file.");<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          context.getCounter(Counter.FILES_SKIPPED).increment(1);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>          context.getCounter(Counter.BYTES_SKIPPED).increment(inputStat.getLen());<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          return;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        }<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>      InputStream in = openSourceFile(context, inputInfo);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      int bandwidthMB = context.getConfiguration().getInt(CONF_BANDWIDTH_MB, 100);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      if (Integer.MAX_VALUE != bandwidthMB) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        in = new ThrottledInputStream(new BufferedInputStream(in), bandwidthMB * 1024 * 1024L);<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      }<a name="line.305"></a>
+<span class="sourceLineNo">306</span><a name="line.306"></a>
+<span class="sourceLineNo">307</span>      try {<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        context.getCounter(Counter.BYTES_EXPECTED).increment(inputStat.getLen());<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>        // Ensure that the output folder is there and copy the file<a name="line.310"></a>
+<span class="sourceLineNo">311</span>        createOutputPath(outputPath.getParent());<a name="line.311"></a>
+<span class="sourceLineNo">312</span>        FSDataOutputStream out = outputFs.create(outputPath, true);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>        try {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          copyData(context, inputStat.getPath(), in, outputPath, out, inputStat.getLen());<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        } finally {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>          out.close();<a name="line.316"></a>
+<span class="sourceLineNo">317</span>        }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>        // Try to Preserve attributes<a name="line.319"></a>
+<span class="sourceLineNo">320</span>        if (!preserveAttributes(outputPath, inputStat)) {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          LOG.warn("You may have to run manually chown on: " + outputPath);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      } finally {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>        in.close();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>        injectTestFailure(context, inputInfo);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      }<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>    /**<a name="line.329"></a>
+<span class="sourceLineNo">330</span>     * Create the output folder and optionally set ownership.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>     */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    private void createOutputPath(final Path path) throws IOException {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (filesUser == null &amp;&amp; filesGroup == null) {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        outputFs.mkdirs(path);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      } else {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        Path parent = path.getParent();<a name="line.336"></a>
+<span class="sourceLineNo">337</span>        if (!outputFs.exists(parent) &amp;&amp; !parent.isRoot()) {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          createOutputPath(parent);<a name="line.338"></a>
 <span class="sourceLineNo">339</span>        }<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>    /**<a name="line.343"></a>
-<span class="sourceLineNo">344</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.344"></a>
-<span class="sourceLineNo">345</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.345"></a>
-<span class="sourceLineNo">346</span>     * that doesn't have the "hbase" user.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>     *<a name="line.347"></a>
-<span class="sourceLineNo">348</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.348"></a>
-<span class="sourceLineNo">349</span>     * that knows is available on the system.<a name="line.349"></a>
-<span class="sourceLineNo">350</span>     */<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      FileStatus stat;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      try {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        stat = outputFs.getFileStatus(path);<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      } catch (IOException e) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        return false;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      }<a name="line.358"></a>
-<span class="sourceLineNo">359</span><a name="line.359"></a>
-<span class="sourceLineNo">360</span>      try {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      } catch (IOException e) {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        return false;<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>      boolean hasRefStat = (refStat != null);<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        try {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>            outputFs.setOwner(path, user, group);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>          }<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        } catch (IOException e) {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.380"></a>
-<span class="sourceLineNo">381</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>                   user + " group=" + group);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>          return false;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>        }<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
-<span class="sourceLineNo">386</span><a name="line.386"></a>
-<span class="sourceLineNo">387</span>      return true;<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    }<a name="line.392"></a>
-<span class="sourceLineNo">393</span><a name="line.393"></a>
-<span class="sourceLineNo">394</span>    private void copyData(final Context context,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        final Path inputPath, final InputStream in,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        final Path outputPath, final FSDataOutputStream out,<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        final long inputFileSize)<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        throws IOException {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      final String statusMessage = "copied %s/" + StringUtils.humanReadableInt(inputFileSize) +<a name="line.399"></a>
-<span class="sourceLineNo">400</span>                                   " (%.1f%%)";<a name="line.400"></a>
+<span class="sourceLineNo">340</span>        outputFs.mkdirs(path);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>        if (filesUser != null || filesGroup != null) {<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          // override the owner when non-null user/group is specified<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          outputFs.setOwner(path, filesUser, filesGroup);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        }<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        if (filesMode &gt; 0) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      }<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    }<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>    /**<a name="line.351"></a>
+<span class="sourceLineNo">352</span>     * Try to Preserve the files attribute selected by the user copying them from the source file<a name="line.352"></a>
+<span class="sourceLineNo">353</span>     * This is only required when you are exporting as a different user than "hbase" or on a system<a name="line.353"></a>
+<span class="sourceLineNo">354</span>     * that doesn't have the "hbase" user.<a name="line.354"></a>
+<span class="sourceLineNo">355</span>     *<a name="line.355"></a>
+<span class="sourceLineNo">356</span>     * This is not considered a blocking failure since the user can force a chmod with the user<a name="line.356"></a>
+<span class="sourceLineNo">357</span>     * that knows is available on the system.<a name="line.357"></a>
+<span class="sourceLineNo">358</span>     */<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    private boolean preserveAttributes(final Path path, final FileStatus refStat) {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      FileStatus stat;<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      try {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        stat = outputFs.getFileStatus(path);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      } catch (IOException e) {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>        LOG.warn("Unable to get the status for file=" + path);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        return false;<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      }<a name="line.366"></a>
+<span class="sourceLineNo">367</span><a name="line.367"></a>
+<span class="sourceLineNo">368</span>      try {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>        if (filesMode &gt; 0 &amp;&amp; stat.getPermission().toShort() != filesMode) {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>          outputFs.setPermission(path, new FsPermission(filesMode));<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        } else if (refStat != null &amp;&amp; !stat.getPermission().equals(refStat.getPermission())) {<a name="line.371"></a>
+<span class="sourceLineNo">372</span>          outputFs.setPermission(path, refStat.getPermission());<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        }<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>        LOG.warn("Unable to set the permission for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.375"></a>
+<span class="sourceLineNo">376</span>        return false;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      }<a name="line.377"></a>
+<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">379</span>      boolean hasRefStat = (refStat != null);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>      String user = stringIsNotEmpty(filesUser) || !hasRefStat ? filesUser : refStat.getOwner();<a name="line.380"></a>
+<span class="sourceLineNo">381</span>      String group = stringIsNotEmpty(filesGroup) || !hasRefStat ? filesGroup : refStat.getGroup();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>      if (stringIsNotEmpty(user) || stringIsNotEmpty(group)) {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        try {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          if (!(user.equals(stat.getOwner()) &amp;&amp; group.equals(stat.getGroup()))) {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>            outputFs.setOwner(path, user, group);<a name="line.385"></a>
+<span class="sourceLineNo">386</span>          }<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        } catch (IOException e) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          LOG.warn("Unable to set the owner/group for file="+ stat.getPath() +": "+ e.getMessage());<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          LOG.warn("The user/group may not exist on the destination cluster: user=" +<a name="line.389"></a>
+<span class="sourceLineNo">390</span>                   user + " group=" + group);<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          return false;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      }<a name="line.393"></a>
+<span class="sourceLineNo">394</span><a name="line.394"></a>
+<span class="sourceLineNo">395</span>      return true;<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    }<a name="line.396"></a>
+<span class="sourceLineNo">397</span><a name="line.397"></a>
+<span class="sourceLineNo">398</span>    private boolean stringIsNotEmpty(final String str) {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      return str != null &amp;&amp; str.length() &gt; 0;<a name="line.399"></a>
+<span class="sourceLineNo">400</span>    }<a name="line.400"></a>
 <span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      try {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        byte[] buffer = new byte[bufferSize];<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        long totalBytesWritten = 0;<a name="line.404"></a>
-<span class="sourceLineNo">405</span>        int reportBytes = 0;<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        int bytesRead;<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>        long stime = System.currentTimeMillis();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        while ((bytesRead = in.read(buffer)) &gt; 0) {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>          out.write(buffer, 0, bytesRead);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>          totalBytesWritten += bytesRead;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          reportBytes += bytesRead;<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>          if (reportBytes &gt;= REPORT_SIZE) {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>            context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>            context.setStatus(String.format(statusMessage,<a name="line.416"></a>
-<span class="sourceLineNo">417</span>                              StringUtils.humanReadableInt(totalBytesWritten),<a name="line.417"></a>
-<span class="sourceLineNo">418</span>                              (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                              " from " + inputPath + " to " + outputPath);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>            reportBytes = 0;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          }<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        long etime = System.currentTimeMillis();<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        context.setStatus(String.format(statusMessage,<a name="line.426"></a>
-<span class="sourceLineNo">427</span>                          StringUtils.humanReadableInt(totalBytesWritten),<a name="line.427"></a>
-<span class="sourceLineNo">428</span>                          (totalBytesWritten/(float)inputFileSize) * 100.0f) +<a name="line.428"></a>
-<span class="sourceLineNo">429</span>                          " from " + inputPath + " to " + outputPath);<a name="line.429"></a>
-<span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>        // Verify that the written size match<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        if (totalBytesWritten != inputFileSize) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          String msg = "number of bytes copied not matching copied=" + totalBytesWritten +<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                       " expected=" + inputFileSize + " for file=" + inputPath;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          throw new IOException(msg);<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        }<a name="line.436"></a>
-<span class="sourceLineNo">437</span><a name="line.437"></a>
-<span class="sourceLineNo">438</span>        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        LOG.info("size=" + totalBytesWritten +<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            " (" + StringUtils.humanReadableInt(totalBytesWritten) + ")" +<a name="line.440"></a>
-<span class="sourceLineNo">441</span>            " time=" + StringUtils.formatTimeDiff(etime, stime) +<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            String.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime)/1000.0))/1048576.0));<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        context.getCounter(Counter.FILES_COPIED).increment(1);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      } catch (IOException e) {<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        LOG.error("Error copying " + inputPath + " to " + outputPath, e);<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        context.getCounter(Counter.COPY_FAILED).increment(1);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        throw e;<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      }<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    }<a name="line.449"></a>
-<span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    /**<a name="line.451"></a>
-<span class="sourceLineNo">452</span>     * Try to open the "source" file.<a name="line.452"></a>
-<span class="sourceLineNo">453</span>     * Throws an IOException if the communication with the inputFs fail or<a name="line.453"></a>
-<span class="sourceLineNo">454</span>     * if the file is not found.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>     */<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    private FSDataInputStream openSourceFile(Context context, final SnapshotFileInfo fileInfo)<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            throws IOException {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      try {<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        Configuration conf = context.getConfiguration();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        FileLink link = null;<a name="line.460"></a>
-<span class="sourceLineNo">461</span>        switch (fileInfo.getType()) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>          case HFILE:<a name="line.462"></a>
-<span class="sourceLineNo">463</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.463"></a>
-<span class="sourceLineNo">464</span>            link = getFileLink(inputPath, conf);<a name="line.464"></a>
-<span class="sourceLineNo">465</span>            break;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>          case WAL:<a name="line.466"></a>
-<span class="sourceLineNo">467</span>            String serverName = fileInfo.getWalServer();<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            String logName = fileInfo.getWalName();<a name="line.468"></a>
-<span class="sourceLineNo">469</span>            link = new WALLink(inputRoot, serverName, logName);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>            break;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>          default:<a name="line.471"></a>
-<span class="sourceLineNo">472</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.472"></a>
-<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        return link.open(inputFs);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>      } catch (IOException e) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        context.getCounter(Counter.MISSING_FILES).increment(1);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>        LOG.error("Unable to open source file=" + fileInfo.toString(), e);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    private FileStatus getSourceFileStatus(Context context, final SnapshotFileInfo fileInfo)<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        throws IOException {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        Configuration conf = context.getConfiguration();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        FileLink link = null;<a name="line.486"></a>
-<span class="sourceLineNo">487</span>        switch (fileInfo.getType()) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          case HFILE:<a name="line.488"></a>
-<span class="sourceLineNo">489</span>            Path inputPath = new Path(fileInfo.getHfile());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>            link = getFileLink(inputPath, conf);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            break;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>          case WAL:<a name="line.492"></a>
-<span class="sourceLineNo">493</span>            link = new WALLink(inputRoot, fileInfo.getWalServer(), fileInfo.getWalName());<a name="line.493"></a>
-<span class="sourceLineNo">494</span>            break;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          default:<a name="line.495"></a>
-<span class="sourceLineNo">496</span>            throw new IOException("Invalid File Type: " + fileInfo.getType().toString());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        }<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        return link.getFileStatus(inputFs);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      } catch (FileNotFoundException

<TRUNCATED>
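
The hunk above is cut off by the archive before the mapper's copy loop reappears on the '+' side, but the removed rendering (source lines 394-449) already shows the pattern the ExportSnapshot mapper follows: read fixed-size chunks from the snapshot file, write them to the destination, report progress periodically, and fail the copy if the byte count does not match the expected file size. A minimal, standalone sketch of that pattern follows; the class name, buffer size, and progress hook are illustrative assumptions for the sketch, not the HBase identifiers.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/** Illustrative sketch only; not the HBase ExportSnapshot implementation. */
public final class BufferedCopySketch {
  private static final int BUFFER_SIZE = 64 * 1024;        // assumed chunk size
  private static final long REPORT_SIZE = 1024L * 1024L;   // assumed progress interval (~1 MB)

  /** Copies in to out, verifies the total against expectedSize, and returns the bytes copied. */
  static long copyAndVerify(InputStream in, OutputStream out, long expectedSize)
      throws IOException {
    byte[] buffer = new byte[BUFFER_SIZE];
    long total = 0;
    long sinceLastReport = 0;
    int read;
    while ((read = in.read(buffer)) > 0) {
      out.write(buffer, 0, read);
      total += read;
      sinceLastReport += read;
      if (sinceLastReport >= REPORT_SIZE) {
        // The real mapper increments a BYTES_COPIED counter and updates the task status here.
        System.out.printf("copied %d/%d bytes (%.1f%%)%n",
            total, expectedSize, (total / (float) expectedSize) * 100.0f);
        sinceLastReport = 0;
      }
    }
    if (total != expectedSize) {
      throw new IOException("number of bytes copied not matching copied=" + total
          + " expected=" + expectedSize);
    }
    return total;
  }
}

Like the mapper, the sketch treats a size mismatch as a hard failure rather than a warning, since a short copy would silently corrupt the exported snapshot.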

[03/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.html
index ec995d2..01a18e6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSUtils.html
@@ -51,1705 +51,1748 @@
 <span class="sourceLineNo">043</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.ExecutionException;<a name="line.44"></a>
 <span class="sourceLineNo">045</span>import java.util.concurrent.ExecutorService;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import java.util.concurrent.Future;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import java.util.concurrent.FutureTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import java.util.concurrent.TimeUnit;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import java.util.regex.Pattern;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.conf.Configuration;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileStatus;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileSystem;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.Path;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.PathFilter;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.HConstants;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.io.IOUtils;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.util.Progressable;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.util.StringUtils;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.Logger;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.slf4j.LoggerFactory;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>/**<a name="line.98"></a>
-<span class="sourceLineNo">099</span> * Utility methods for interacting with the underlying file system.<a name="line.99"></a>
-<span class="sourceLineNo">100</span> */<a name="line.100"></a>
-<span class="sourceLineNo">101</span>@InterfaceAudience.Private<a name="line.101"></a>
-<span class="sourceLineNo">102</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /** Set to true on Windows platforms */<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected FSUtils() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    super();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span><a name="line.115"></a>
-<span class="sourceLineNo">116</span>  /**<a name="line.116"></a>
-<span class="sourceLineNo">117</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.117"></a>
-<span class="sourceLineNo">118</span>   * @throws IOException<a name="line.118"></a>
-<span class="sourceLineNo">119</span>   */<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    FileSystem fileSystem = fs;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // Check its backing fs for dfs-ness.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    if (fs instanceof HFileSystem) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.132"></a>
-<span class="sourceLineNo">133</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * @param pathToSearch Path we will be trying to match.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   * @param pathTail<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path tailPath = pathTail;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    String tailName;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    Path toSearch = pathToSearch;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    String toSearchName;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    boolean result = false;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    do {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      tailName = tailPath.getName();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        result = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        break;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      toSearchName = toSearch.getName();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      tailPath = tailPath.getParent();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      toSearch = toSearch.getParent();<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    } while(tailName.equals(toSearchName));<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    return result;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    String scheme = fs.getUri().getScheme();<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    if (scheme == null) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      LOG.warn("Could not find scheme for uri " +<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          fs.getUri() + ", default to hdfs");<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      scheme = "hdfs";<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return fsUtils;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  /**<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   * Delete the region directory if exists.<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param conf<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param hri<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @return True if deleted the region directory.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   * @throws IOException<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   */<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  throws IOException {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    Path rootDir = getRootDir(conf);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return deleteDirectory(fs,<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.185"></a>
-<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span> /**<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;ol&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.194"></a>
-<span class="sourceLineNo">195</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * &lt;/ol&gt;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   * @param conf configurations<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path {@link Path} to the file to write<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @param perm permissions<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   * @param favoredNodes<a name="line.203"></a>
-<span class="sourceLineNo">204</span>   * @return output stream to the created file<a name="line.204"></a>
-<span class="sourceLineNo">205</span>   * @throws IOException if the file cannot be created<a name="line.205"></a>
-<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    if (fs instanceof HFileSystem) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        // compatibility.<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        try {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.217"></a>
-<span class="sourceLineNo">218</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            getDefaultBufferSize(backingFs),<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.221"></a>
-<span class="sourceLineNo">222</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        } catch (InvocationTargetException ite) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // Function was properly called, but threw it's own exception.<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          throw new IOException(ite.getCause());<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        } catch (NoSuchMethodException e) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        } catch (IllegalArgumentException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>        } catch (SecurityException e) {<a name="line.231"></a>
+<span class="sourceLineNo">046</span>import java.util.concurrent.Executors;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import java.util.concurrent.Future;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import java.util.concurrent.FutureTask;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import java.util.concurrent.TimeUnit;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import java.util.regex.Pattern;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.BlockLocation;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.FileStatus;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.FileSystem;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.fs.FileUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.fs.Path;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.fs.PathFilter;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.fs.permission.FsAction;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.ClusterId;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.HColumnDescriptor;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.HConstants;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.HRegionInfo;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.TableName;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.fs.HFileSystem;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.StoreFileInfo;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.DFSHedgedReadMetrics;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hdfs.protocol.HdfsConstants;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.io.IOUtils;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.util.Progressable;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.util.StringUtils;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.primitives.Ints;<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos;<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>/**<a name="line.100"></a>
+<span class="sourceLineNo">101</span> * Utility methods for interacting with the underlying file system.<a name="line.101"></a>
+<span class="sourceLineNo">102</span> */<a name="line.102"></a>
+<span class="sourceLineNo">103</span>@InterfaceAudience.Private<a name="line.103"></a>
+<span class="sourceLineNo">104</span>public abstract class FSUtils extends CommonFSUtils {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class);<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize";<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  private static final int DEFAULT_THREAD_POOLSIZE = 2;<a name="line.108"></a>
+<span class="sourceLineNo">109</span><a name="line.109"></a>
+<span class="sourceLineNo">110</span>  /** Set to true on Windows platforms */<a name="line.110"></a>
+<span class="sourceLineNo">111</span>  @VisibleForTesting // currently only used in testing. TODO refactor into a test class<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  public static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  protected FSUtils() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    super();<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * @return True is &lt;code&gt;fs&lt;/code&gt; is instance of DistributedFileSystem<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   * @throws IOException<a name="line.120"></a>
+<span class="sourceLineNo">121</span>   */<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static boolean isDistributedFileSystem(final FileSystem fs) throws IOException {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    FileSystem fileSystem = fs;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If passed an instance of HFileSystem, it fails instanceof DistributedFileSystem.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    // Check its backing fs for dfs-ness.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (fs instanceof HFileSystem) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fileSystem = ((HFileSystem)fs).getBackingFs();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    return fileSystem instanceof DistributedFileSystem;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  /**<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * Compare path component of the Path URI; e.g. if hdfs://a/b/c and /a/b/c, it will compare the<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * '/a/b/c' part. If you passed in 'hdfs://a/b/c and b/c, it would return true.  Does not consider<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * schema; i.e. if schemas different but path or subpath matches, the two will equate.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @param pathToSearch Path we will be trying to match.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param pathTail<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return True if &lt;code&gt;pathTail&lt;/code&gt; is tail on the path of &lt;code&gt;pathToSearch&lt;/code&gt;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    if (pathToSearch.depth() != pathTail.depth()) return false;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path tailPath = pathTail;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    String tailName;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    Path toSearch = pathToSearch;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String toSearchName;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    boolean result = false;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    do {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      tailName = tailPath.getName();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (tailName == null || tailName.length() &lt;= 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        result = true;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        break;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      toSearchName = toSearch.getName();<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      if (toSearchName == null || toSearchName.length() &lt;= 0) break;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      // Move up a parent on each path for next go around.  Path doesn't let us go off the end.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      tailPath = tailPath.getParent();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      toSearch = toSearch.getParent();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } while(tailName.equals(toSearchName));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    return result;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  public static FSUtils getInstance(FileSystem fs, Configuration conf) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    String scheme = fs.getUri().getScheme();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    if (scheme == null) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      LOG.warn("Could not find scheme for uri " +<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          fs.getUri() + ", default to hdfs");<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      scheme = "hdfs";<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    Class&lt;?&gt; fsUtilsClass = conf.getClass("hbase.fsutil." +<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        scheme + ".impl", FSHDFSUtils.class); // Default to HDFS impl<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    FSUtils fsUtils = (FSUtils)ReflectionUtils.newInstance(fsUtilsClass, conf);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    return fsUtils;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
+<span class="sourceLineNo">174</span><a name="line.174"></a>
+<span class="sourceLineNo">175</span>  /**<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * Delete the region directory if exists.<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param conf<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param hri<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @return True if deleted the region directory.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @throws IOException<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   */<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  public static boolean deleteRegionDir(final Configuration conf, final HRegionInfo hri)<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    Path rootDir = getRootDir(conf);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    FileSystem fs = rootDir.getFileSystem(conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return deleteDirectory(fs,<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      new Path(getTableDir(rootDir, hri.getTable()), hri.getEncodedName()));<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span> /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Create the specified file on the filesystem. By default, this will:<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * &lt;ol&gt;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * &lt;li&gt;overwrite the file if it exists&lt;/li&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   * &lt;li&gt;apply the umask in the configuration (if it is enabled)&lt;/li&gt;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>   * &lt;li&gt;use the fs configured buffer size (or 4096 if not set)&lt;/li&gt;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>   * &lt;li&gt;use the configured column family replication or default replication if<a name="line.196"></a>
+<span class="sourceLineNo">197</span>   * {@link HColumnDescriptor#DEFAULT_DFS_REPLICATION}&lt;/li&gt;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * &lt;li&gt;use the default block size&lt;/li&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * &lt;li&gt;not track progress&lt;/li&gt;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * &lt;/ol&gt;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @param conf configurations<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param fs {@link FileSystem} on which to write the file<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @param path {@link Path} to the file to write<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * @param perm permissions<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @param favoredNodes<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   * @return output stream to the created file<a name="line.206"></a>
+<span class="sourceLineNo">207</span>   * @throws IOException if the file cannot be created<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  public static FSDataOutputStream create(Configuration conf, FileSystem fs, Path path,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      FsPermission perm, InetSocketAddress[] favoredNodes) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    if (fs instanceof HFileSystem) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      FileSystem backingFs = ((HFileSystem)fs).getBackingFs();<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      if (backingFs instanceof DistributedFileSystem) {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        // Try to use the favoredNodes version via reflection to allow backwards-<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        // compatibility.<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        short replication = Short.parseShort(conf.get(HColumnDescriptor.DFS_REPLICATION,<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          String.valueOf(HColumnDescriptor.DEFAULT_DFS_REPLICATION)));<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        try {<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          return (FSDataOutputStream) (DistributedFileSystem.class.getDeclaredMethod("create",<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            Path.class, FsPermission.class, boolean.class, int.class, short.class, long.class,<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            Progressable.class, InetSocketAddress[].class).invoke(backingFs, path, perm, true,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>            getDefaultBufferSize(backingFs),<a name="line.222"></a>
+<span class="sourceLineNo">223</span>            replication &gt; 0 ? replication : getDefaultReplication(backingFs, path),<a name="line.223"></a>
+<span class="sourceLineNo">224</span>            getDefaultBlockSize(backingFs, path), null, favoredNodes));<a name="line.224"></a>
+<span class="sourceLineNo">225</span>        } catch (InvocationTargetException ite) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          // Function was properly called, but threw it's own exception.<a name="line.226"></a>
+<span class="sourceLineNo">227</span>          throw new IOException(ite.getCause());<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        } catch (NoSuchMethodException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          LOG.debug("DFS Client does not support most favored nodes create; using default create");<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          if (LOG.isTraceEnabled()) LOG.trace("Ignoring; use default create", e);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (IllegalArgumentException e) {<a name="line.231"></a>
 <span class="sourceLineNo">232</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        } catch (IllegalAccessException e) {<a name="line.233"></a>
+<span class="sourceLineNo">233</span>        } catch (SecurityException e) {<a name="line.233"></a>
 <span class="sourceLineNo">234</span>          LOG.debug("Ignoring (most likely Reflection related exception) " + e);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        }<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      }<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    return create(fs, path, perm, true);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>  /**<a name="line.241"></a>
-<span class="sourceLineNo">242</span>   * Checks to see if the specified file system is available<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   *<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   * @param fs filesystem<a name="line.244"></a>
-<span class="sourceLineNo">245</span>   * @throws IOException e<a name="line.245"></a>
-<span class="sourceLineNo">246</span>   */<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  public static void checkFileSystemAvailable(final FileSystem fs)<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  throws IOException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    if (!(fs instanceof DistributedFileSystem)) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    }<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    IOException exception = null;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    DistributedFileSystem dfs = (DistributedFileSystem) fs;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      if (dfs.exists(new Path("/"))) {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        return;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    } catch (IOException e) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      exception = e instanceof RemoteException ?<a name="line.259"></a>
-<span class="sourceLineNo">260</span>              ((RemoteException)e).unwrapRemoteException() : e;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    }<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    try {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      fs.close();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } catch (Exception e) {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      LOG.error("file system close failed: ", e);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    IOException io = new IOException("File system is not available");<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    io.initCause(exception);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    throw io;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * We use reflection because {@link DistributedFileSystem#setSafeMode(<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1<a name="line.274"></a>
-<span class="sourceLineNo">275</span>   *<a name="line.275"></a>
-<span class="sourceLineNo">276</span>   * @param dfs<a name="line.276"></a>
-<span class="sourceLineNo">277</span>   * @return whether we're in safe mode<a name="line.277"></a>
-<span class="sourceLineNo">278</span>   * @throws IOException<a name="line.278"></a>
-<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    boolean inSafeMode = false;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    try {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class&lt;?&gt; []{<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      inSafeMode = (Boolean) m.invoke(dfs,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    } catch (Exception e) {<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      if (e instanceof IOException) throw (IOException) e;<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // Check whether dfs is on safemode.<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      inSafeMode = dfs.setSafeMode(<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    }<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return inSafeMode;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
-<span class="sourceLineNo">296</span><a name="line.296"></a>
-<span class="sourceLineNo">297</span>  /**<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * Check whether dfs is in safemode.<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param conf<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @throws IOException<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   */<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  public static void checkDfsSafeMode(final Configuration conf)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    boolean isInSafeMode = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    FileSystem fs = FileSystem.get(conf);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    if (fs instanceof DistributedFileSystem) {<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      isInSafeMode = isInSafeMode(dfs);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    }<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    if (isInSafeMode) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throw new IOException("File system is in safemode, it can't be written now");<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>  }<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>  /**<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * Verifies current version of file system<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   *<a name="line.317"></a>
-<span class="sourceLineNo">318</span>   * @param fs filesystem object<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * @param rootdir root hbase directory<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @return null if no version file exists, version string otherwise.<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @throws IOException e<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  public static String getVersion(FileSystem fs, Path rootdir)<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  throws IOException, DeserializationException {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    FileStatus[] status = null;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    try {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // hadoop 2.0 throws FNFE if directory does not exist.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // hadoop 1.0 returns null if directory does not exist.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      status = fs.listStatus(versionFile);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    } catch (FileNotFoundException fnfe) {<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      return null;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    }<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    if (status == null || status.length == 0) return null;<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    String version = null;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    byte [] content = new byte [(int)status[0].getLen()];<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    FSDataInputStream s = fs.open(versionFile);<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    try {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>      IOUtils.readFully(s, content, 0, content.length);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      if (ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        version = parseVersionFrom(content);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      } else {<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        // Presume it pre-pb format.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        InputStream is = new ByteArrayInputStream(content);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        DataInputStream dis = new DataInputStream(is);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        try {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          version = dis.readUTF();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        } finally {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>          dis.close();<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        }<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      }<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    } catch (EOFException eof) {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      LOG.warn("Version file was empty, odd, will try to set it.");<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    } finally {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      s.close();<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    }<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    return version;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  /**<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param bytes The byte content of the hbase.version file.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @return The version found in the file as a String.<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * @throws DeserializationException<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  static String parseVersionFrom(final byte [] bytes)<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  throws DeserializationException {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    ProtobufUtil.expectPBMagicPrefix(bytes);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    try {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);<a name="line.374"></a>
-<span class="sourceLineNo">375</span>      return builder.getVersion();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    } catch (IOException e) {<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      // Convert<a name="line.377"></a>
-<span class="sourceLineNo">378</span>      throw new DeserializationException(e);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
-<span class="sourceLineNo">381</span><a name="line.381"></a>
-<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param version Version to persist<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @return Serialized protobuf with &lt;code&gt;version&lt;/code&gt; content and a bit of pb magic for a prefix.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  static byte [] toVersionByteArray(final String version) {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    FSProtos.HBaseVersionFileContent.Builder builder =<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      FSProtos.HBaseVersionFileContent.newBuilder();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>  /**<a name="line.393"></a>
-<span class="sourceLineNo">394</span>   * Verifies current version of file system<a name="line.394"></a>
-<span class="sourceLineNo">395</span>   *<a name="line.395"></a>
-<span class="sourceLineNo">396</span>   * @param fs file system<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * @param rootdir root directory of HBase installation<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param message if true, issues a message on System.out<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   * @throws IOException e<a name="line.400"></a>
-<span class="sourceLineNo">401</span>   * @throws DeserializationException<a name="line.401"></a>
-<span class="sourceLineNo">402</span>   */<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  throws IOException, DeserializationException {<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.405"></a>
-<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
-<span class="sourceLineNo">407</span><a name="line.407"></a>
-<span class="sourceLineNo">408</span>  /**<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   * Verifies current version of file system<a name="line.409"></a>
-<span class="sourceLineNo">410</span>   *<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @param fs file system<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   * @param rootdir root directory of HBase installation<a name="line.412"></a>
-<span class="sourceLineNo">413</span>   * @param message if true, issues a message on System.out<a name="line.413"></a>
-<span class="sourceLineNo">414</span>   * @param wait wait interval<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @param retries number of times to retry<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   * @throws IOException e<a name="line.417"></a>
-<span class="sourceLineNo">418</span>   * @throws DeserializationException<a name="line.418"></a>
-<span class="sourceLineNo">419</span>   */<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  public static void checkVersion(FileSystem fs, Path rootdir,<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      boolean message, int wait, int retries)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  throws IOException, DeserializationException {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    String version = getVersion(fs, rootdir);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    if (version == null) {<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      if (!metaRegionExists(fs, rootdir)) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        // rootDir is empty (no version file and no root region)<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // just create new version file (HBASE-1195)<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        setVersion(fs, rootdir, wait, retries);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>        return;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      }<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;<a name="line.431"></a>
-<span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    // version is deprecated require migration<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    // Output on stdout so user sees it in terminal.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    String msg = "HBase file layout needs to be upgraded."<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      + " You have version " + version<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      + " and I want version " + HConstants.FILE_SYSTEM_VERSION<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      + ". Consult http://hbase.apache.org/book.html for further information about upgrading HBase."<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      + " Is your hbase.rootdir valid? If so, you may need to run "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      + "'hbase hbck -fixVersionFile'.";<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    if (message) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>      System.out.println("WARNING! " + msg);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    throw new FileSystemVersionException(msg);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  }<a name="line.445"></a>
-<span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>  /**<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   * Sets version of file system<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   *<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   * @param fs filesystem object<a name="line.450"></a>
-<span class="sourceLineNo">451</span>   * @param rootdir hbase root<a name="line.451"></a>
-<span class="sourceLineNo">452</span>   * @throws IOException e<a name="line.452"></a>
-<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  public static void setVersion(FileSystem fs, Path rootdir)<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  throws IOException {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>      HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);<a name="line.457"></a>
-<span class="sourceLineNo">458</span>  }<a name="line.458"></a>
-<span class="sourceLineNo">459</span><a name="line.459"></a>
-<span class="sourceLineNo">460</span>  /**<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * Sets version of file system<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   *<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * @param fs filesystem object<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * @param rootdir hbase root<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * @param wait time to wait for retry<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   * @param retries number of times to retry before failing<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * @throws IOException e<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   */<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)<a name="line.469"></a>
-<span class="sourceLineNo">470</span>  throws IOException {<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  }<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>  /**<a name="line.475"></a>
-<span class="sourceLineNo">476</span>   * Sets version of file system<a name="line.476"></a>
-<span class="sourceLineNo">477</span>   *<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * @param fs filesystem object<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @param rootdir hbase root directory<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   * @param version version to set<a name="line.480"></a>
-<span class="sourceLineNo">481</span>   * @param wait time to wait for retry<a name="line.481"></a>
-<span class="sourceLineNo">482</span>   * @param retries number of times to retry before throwing an IOException<a name="line.482"></a>
-<span class="sourceLineNo">483</span>   * @throws IOException e<a name="line.483"></a>
-<span class="sourceLineNo">484</span>   */<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  public static void setVersion(FileSystem fs, Path rootdir, String version,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>      int wait, int retries) throws IOException {<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    Path tempVersionFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY + Path.SEPARATOR +<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      HConstants.VERSION_FILE_NAME);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    while (true) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      try {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        // Write the version to a temporary file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        FSDataOutputStream s = fs.create(tempVersionFile);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        try {<a name="line.494"></a>
-<span class="sourceLineNo">495</span>          s.write(toVersionByteArray(version));<a name="line.495"></a>
-<span class="sourceLineNo">496</span>          s.close();<a name="line.496"></a>
-<span class="sourceLineNo">497</span>          s = null;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>          // Move the temp version file to its normal location. Returns false<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          // if the rename failed. Throw an IOE in that case.<a name="line.499"></a>
-<span class="sourceLineNo">500</span>          if (!fs.rename(tempVersionFile, versionFile)) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>            throw new IOException("Unable to move temp version file to " + versionFile);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        } finally {<a name="line.503"></a>
-<span class="sourceLineNo">504</span>          // Cleaning up the temporary if the rename failed would be trying<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          // too hard. We'll unconditionally create it again the next time<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          // through anyway, files are overwritten by default by create().<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>          // Attempt to close the stream on the way out if it is still open.<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          try {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>            if (s != null) s.close();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          } catch (IOException ignore) { }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        LOG.info("Created version file at " + rootdir.toString() + " with version=" + version);<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        return;<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      } catch (IOException e) {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        if (retries &gt; 0) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>          LOG.debug("Unable to create version file at " + rootdir.toString() + ", retrying", e);<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          fs.delete(versionFile, false);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>          try {<a name="line.519"></a>
-<span class="sourceLineNo">520</span>            if (wait &gt; 0) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>              Thread.sleep(wait);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>            }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>          } catch (InterruptedException ie) {<a name="line.523"></a>
-<span class="sourceLineNo">524</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(ie);<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          retries--;<a name="line.526"></a>
-<span class="sourceLineNo">527</span>        } else {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          throw e;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  }<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>  /**<a name="line.534"></a>
-<span class="sourceLineNo">535</span>   * Checks that a cluster ID file exists in the HBase root directory<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   * @param fs the root directory FileSystem<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * @param rootdir the HBase root directory in HDFS<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * @param wait how long to wait between retries<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * @return &lt;code&gt;true&lt;/code&gt; if the file exists, otherwise &lt;code&gt;false&lt;/code&gt;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @throws IOException if checking the FileSystem fails<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      int wait) throws IOException {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    while (true) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      try {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        return fs.exists(filePath);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      } catch (IOException ioe) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>        if (wait &gt; 0) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +<a name="line.550"></a>
-<span class="sourceLineNo">551</span>              ", retrying in "+wait+"msec: "+StringUtils.stringifyException(ioe));<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>            Thread.sleep(wait);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          } catch (InterruptedException e) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        } else {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          throw ioe;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Returns the value of the unique cluster ID stored for this HBase instance.<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * @param fs the root directory FileSystem<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   * @param rootdir the path to the HBase root directory<a name="line.567"></a>
-<span class="sourceLineNo">568</span>   * @return the unique cluster identifier<a name="line.568"></a>
-<span class="sourceLineNo">569</span>   * @throws IOException if reading the cluster ID file fails<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   */<a name="line.570"></a>
-<span class="sourceLineNo">571</span>  public static ClusterId getClusterId(FileSystem fs, Path rootdir)<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  throws IOException {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    ClusterId clusterId = null;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    FileStatus status = fs.exists(idPath)? fs.getFileStatus(idPath):  null;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    if (status != null) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      int len = Ints.checkedCast(status.getLen());<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      byte [] content = new byte[len];<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      FSDataInputStream in = fs.open(idPath);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      try {<a name="line.580"></a>
-<span class="sourceLineNo">581</span>        in.readFully(content);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      } catch (EOFException eof) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      } finally{<a name="line.584"></a>
-<span class="sourceLineNo">585</span>        in.close();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      }<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      try {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>        clusterId = ClusterId.parseFrom(content);<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      } catch (DeserializationException e) {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>        throw new IOException("content=" + Bytes.toString(content), e);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      }<a name="line.591"></a>
-<span class="sourceLineNo">592</span>      // If not pb'd, make it so.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      if (!ProtobufUtil.isPBMagicPrefix(content)) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>        String cid = null;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        in = fs.open(idPath);<a name="line.595"></a>
-<span class="sourceLineNo">596</span>        try {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          cid = in.readUTF();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          clusterId = new ClusterId(cid);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        } catch (EOFException eof) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          LOG.warn("Cluster ID file " + idPath.toString() + " was empty");<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } finally {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          in.close();<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        rewriteAsPb(fs, rootdir, idPath, clusterId);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      return clusterId;<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    } else {<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      LOG.warn("Cluster ID file does not exist at " + idPath.toString());<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    }<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    return clusterId;<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
-<span class="sourceLineNo">612</span><a name="line.612"></a>
-<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
-<span class="sourceLineNo">614</span>   * @param cid<a name="line.614"></a>
-<span class="sourceLineNo">615</span>   * @throws IOException<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   */<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      final ClusterId cid)<a name="line.618"></a>
-<span class="sourceLineNo">619</span>  throws IOException {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    // Rewrite the file as pb.  Move aside the old one first, write new<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    // then delete the moved-aside file.<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    setClusterId(fs, rootdir, cid, 100);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    if (!fs.delete(movedAsideName, false)) {<a name="line.625"></a>
-<span class="sourceLineNo">626</span>      throw new IOException("Failed delete of " + movedAsideName);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    }<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    LOG.debug("Rewrote the hbase.id file as pb");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>  }<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>  /**<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * Writes a new unique identifier for this cluster to the "hbase.id" file<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * in the HBase root directory<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   * @param fs the root directory FileSystem<a name="line.634"></a>
-<span class="sourceLineNo">635</span>   * @param rootdir the path to the HBase root directory<a name="line.635"></a>
-<span class="sourceLineNo">636</span>   * @param clusterId the unique identifier to store<a name="line.636"></a>
-<span class="sourceLineNo">637</span>   * @param wait how long (in milliseconds) to wait between retries<a name="line.637"></a>
-<span class="sourceLineNo">638</span>   * @throws IOException if writing to the FileSystem fails and no wait value<a name="line.638"></a>
-<span class="sourceLineNo">639</span>   */<a name="line.639"></a>
-<span class="sourceLineNo">640</span>  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      int wait) throws IOException {<a name="line.641"></a>
-<span class="sourceLineNo">642</span>    while (true) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      try {<a name="line.643"></a>
-<span class="sourceLineNo">644</span>        Path idFile = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        Path tempIdFile = new Path(rootdir, HConstants.HBASE_TEMP_DIRECTORY +<a name="line.645"></a>
-<span class="sourceLineNo">646</span>          Path.SEPARATOR + HConstants.CLUSTER_ID_FILE_NAME);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>        // Write the id file to a temporary location<a name="line.647"></a>
-<span class="sourceLineNo">648</span>        FSDataOutputStream s = fs.create(tempIdFile);<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        try {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>          s.write(clusterId.toByteArray());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>          s.close();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>          s = null;<a name="line.652"></a>
-<span class="sourceLineNo">653</span>          // Move the temporary file to its normal location. Throw an IOE if<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          // the rename failed<a name="line.654"></a>
-<span class="sourceLineNo">655</span>          if (!fs.rename(tempIdFile, idFile)) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>            throw new IOException("Unable to move temp version file to " + idFile);<a name="line.656"></a>
-<span class="sourceLineNo">657</span>          }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        } finally {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>          // Attempt to close the stream if still open on the way out<a name="line.659"></a>
-<span class="sourceLineNo">660</span>          try {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>            if (s != null) s.close();<a name="line.661"></a>
-<span class="sourceLineNo">662</span>          } catch (IOException ignore) { }<a name="line.662"></a>
-<span class="sourceLineNo">663</span>        }<a name="line.663"></a>
-<span class="sourceLineNo">664</span>        if (LOG.isDebugEnabled()) {<a name="line.664"></a>
-<span class="sourceLineNo">665</span>          LOG.debug("Created cluster ID file at " + idFile.toString() + " with ID: " + clusterId);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return;<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      } catch (IOException ioe) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>        if (wait &gt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));<a name="line.671"></a>
-<span class="sourceLineNo">672</span>          try {<a name="line.672"></a>
-<span class="sourceLineNo">673</span>            Thread.sleep(wait);<a name="line.673"></a>
-<span class="sourceLineNo">674</span>          } catch (InterruptedException e) {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>            throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.675"></a>
-<span class="sourceLineNo">676</span>          }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>        } else {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>          throw ioe;<a name="line.678"></a>
-<span class="sourceLineNo">679</span>        }<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      }<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    }<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>  /**<a name="line.684"></a>
-<span class="sourceLineNo">685</span>   * If DFS, check safe mode and if so, wait until we clear it.<a name="line.685"></a>
-<span class="sourceLineNo">686</span>   * @param conf configuration<a name="line.686"></a>
-<span class="sourceLineNo">687</span>   * @param wait Sleep between retries<a name="line.687"></a>
-<span class="sourceLineNo">688</span>   * @throws IOException e<a name="line.688"></a>
-<span class="sourceLineNo">689</span>   */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void waitOnSafeMode(final Configuration conf,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    final long wait)<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  throws IOException {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    FileSystem fs = FileSystem.get(conf);<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    if (!(fs instanceof DistributedFileSystem)) return;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    DistributedFileSystem dfs = (DistributedFileSystem)fs;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    // Make sure dfs is not in safe mode<a name="line.696"></a>
-<span class="sourceLineNo">697</span>    while (isInSafeMode(dfs)) {<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      LOG.info("Waiting for dfs to exit safe mode...");<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      try {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>        Thread.sleep(wait);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      } catch (InterruptedException e) {<a name="line.701"></a>
-<span class="sourceLineNo">702</span>        throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    }<a name="line.704"></a>
-<span class="sourceLineNo">705</span>  }<a name="line.705"></a>
-<span class="sourceLineNo">706</span><a name="line.706"></a>
-<span class="sourceLineNo">707</span>  /**<a name="line.707"></a>
-<span class="sourceLineNo">708</span>   * Checks if meta region exists<a name="line.708"></a>
-<span class="sourceLineNo">709</span>   *<a name="line.709"></a>
-<span class="sourceLineNo">710</span>   * @param fs file system<a name="line.710"></a>
-<span class="sourceLineNo">711</span>   * @param rootdir root directory of HBase installation<a name="line.711"></a>
-<span class="sourceLineNo">712</span>   * @return true if exists<a name="line.712"></a>
-<span class="sourceLineNo">713</span>   * @throws IOException e<a name="line.713"></a>
-<span class="sourceLineNo">714</span>   */<a name="line.714"></a>
-<span class="sourceLineNo">715</span>  @SuppressWarnings("deprecation")<a name="line.715"></a>
-<span class="sourceLineNo">716</span>  public static boolean metaRegionExists(FileSystem fs, Path rootdir)<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  throws IOException {<a name="line.717"></a>
-<span class="sourceLineNo">718</span>    Path metaRegionDir =<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return fs.exists(metaRegionDir);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  /**<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * Compute HDFS blocks distribution of a given file, or a portion of the file<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   * @param fs file system<a name="line.725"></a>
-<span class="sourceLineNo">726</span>   * @param status file status of the file<a name="line.726"></a>
-<span class="sourceLineNo">727</span>   * @param start start position of the portion<a name="line.727"></a>
-<span class="sourceLineNo">728</span>   * @param length length of the portion<a name="line.728"></a>
-<span class="sourceLineNo">729</span>   * @return The HDFS blocks distribution<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   */<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    final FileSystem fs, FileStatus status, long start, long length)<a name="line.732"></a>
-<span class="sourceLineNo">733</span>    throws IOException {<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    BlockLocation [] blockLocations =<a name="line.735"></a>
-<span class="sourceLineNo">736</span>      fs.getFileBlockLocations(status, start, length);<a name="line.736"></a>
-<span class="sourceLineNo">737</span>    for(BlockLocation bl : blockLocations) {<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      String [] hosts = bl.getHosts();<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      long len = bl.getLength();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      blocksDistribution.addHostsAndBlockWeight(hosts, len);<a name="line.740"></a>
-<span class="sourceLineNo">741</

<TRUNCATED>
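
The FSUtils.create change at the top of the truncated diff above relies on a DistributedFileSystem create overload that accepts favored nodes, located via reflection so that older Hadoop clients still work. The following is a minimal, self-contained sketch of that pattern, assuming a Hadoop 2.x DistributedFileSystem on the classpath; the class and helper names are illustrative and are not part of the diffed FSUtils code.

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.InetSocketAddress;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.util.Progressable;

// Illustrative sketch only; not the FSUtils implementation from the diff.
public final class FavoredNodesCreateSketch {

  private FavoredNodesCreateSketch() {}

  /**
   * Try the DistributedFileSystem create overload that accepts favored nodes,
   * falling back to the plain FileSystem create when the overload is absent.
   */
  public static FSDataOutputStream create(FileSystem fs, Path path, FsPermission perm,
      int bufferSize, short replication, long blockSize, InetSocketAddress[] favoredNodes)
      throws IOException {
    if (fs instanceof DistributedFileSystem) {
      try {
        return (FSDataOutputStream) DistributedFileSystem.class
            .getDeclaredMethod("create", Path.class, FsPermission.class, boolean.class,
                int.class, short.class, long.class, Progressable.class,
                InetSocketAddress[].class)
            .invoke(fs, path, perm, true, bufferSize, replication, blockSize, null, favoredNodes);
      } catch (InvocationTargetException ite) {
        // The create call itself failed; surface its cause.
        throw new IOException(ite.getCause());
      } catch (ReflectiveOperationException | IllegalArgumentException | SecurityException e) {
        // Overload not available on this client; fall through to the default create below.
      }
    }
    return fs.create(path, perm, true, bufferSize, replication, blockSize, null);
  }
}

A caller would pass the buffer size, replication, and block size resolved from the Configuration, as the diffed FSUtils.create does before invoking the reflective path.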

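Further down the same diff, the removed FSUtils.setVersion and setClusterId bodies both publish hbase.version and hbase.id by writing to a temporary file under the HBase temp directory and then renaming it into place. A condensed sketch of that write-temp-then-rename idiom follows; the class and method names are hypothetical, and only the Hadoop FileSystem calls mirror the diffed code.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative sketch only; not the FSUtils implementation from the diff.
public final class TempThenRenameWriteSketch {

  private TempThenRenameWriteSketch() {}

  /** Write content to dir/tempSubdir/fileName, then rename it to dir/fileName. */
  public static void writeAtomically(FileSystem fs, Path dir, String tempSubdir,
      String fileName, byte[] content) throws IOException {
    Path finalFile = new Path(dir, fileName);
    Path tempFile = new Path(new Path(dir, tempSubdir), fileName);
    FSDataOutputStream out = fs.create(tempFile); // create() overwrites any stale temp file
    try {
      out.write(content);
    } finally {
      out.close();
    }
    // Publish in one step so readers never observe a partially written file.
    if (!fs.rename(tempFile, finalFile)) {
      throw new IOException("Unable to move " + tempFile + " to " + finalFile);
    }
  }
}

As in the diffed setVersion, a caller can wrap this in a retry loop with a configurable wait between attempts.
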
[29/29] hbase-site git commit: Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.

Posted by gi...@apache.org.
Published site at c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/ead846d7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/ead846d7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/ead846d7

Branch: refs/heads/asf-site
Commit: ead846d718b974c9cf4c9db9764271a41039a249
Parents: 4a5d182
Author: jenkins <bu...@apache.org>
Authored: Fri May 18 14:47:28 2018 +0000
Committer: jenkins <bu...@apache.org>
Committed: Fri May 18 14:47:28 2018 +0000

----------------------------------------------------------------------
 acid-semantics.html                             |    4 +-
 apache_hbase_reference_guide.pdf                |  138 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.html   |   22 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.html   | 2158 +++++------
 book.html                                       |   10 +-
 bulk-loads.html                                 |    4 +-
 checkstyle-aggregate.html                       |  174 +-
 coc.html                                        |    4 +-
 dependencies.html                               |    4 +-
 dependency-convergence.html                     |    4 +-
 dependency-info.html                            |    4 +-
 dependency-management.html                      |    8 +-
 devapidocs/constant-values.html                 |   41 +-
 devapidocs/index-all.html                       |   22 +-
 .../hadoop/hbase/backup/package-tree.html       |    4 +-
 .../hadoop/hbase/client/package-tree.html       |   24 +-
 .../hadoop/hbase/filter/package-tree.html       |    8 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |    6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |    2 +-
 .../hadoop/hbase/mapreduce/package-tree.html    |    4 +-
 .../hadoop/hbase/master/package-tree.html       |    6 +-
 .../hbase/master/procedure/package-tree.html    |    4 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   14 +-
 .../hadoop/hbase/procedure2/package-tree.html   |    2 +-
 .../hadoop/hbase/quotas/package-tree.html       |    8 +-
 .../SplitLogWorker.TaskExecutor.Status.html     |   14 +-
 .../SplitLogWorker.TaskExecutor.html            |    4 +-
 .../hbase/regionserver/SplitLogWorker.html      |   26 +-
 .../hadoop/hbase/regionserver/package-tree.html |   12 +-
 .../regionserver/querymatcher/package-tree.html |    4 +-
 .../hbase/regionserver/wal/package-tree.html    |    2 +-
 .../hadoop/hbase/rest/model/package-tree.html   |    2 +-
 .../hbase/security/access/package-tree.html     |    2 +-
 .../hbase/snapshot/ExportSnapshot.Counter.html  |   20 +-
 .../snapshot/ExportSnapshot.ExportMapper.html   |   64 +-
 ...hotInputFormat.ExportSnapshotInputSplit.html |   20 +-
 ...tInputFormat.ExportSnapshotRecordReader.html |   24 +-
 ...xportSnapshot.ExportSnapshotInputFormat.html |    8 +-
 .../hbase/snapshot/ExportSnapshot.Options.html  |   28 +-
 .../hbase/snapshot/ExportSnapshot.Testing.html  |   12 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.html   |  218 +-
 .../hadoop/hbase/thrift/package-tree.html       |    2 +-
 .../apache/hadoop/hbase/util/FSHDFSUtils.html   |    2 +-
 .../hbase/util/FSUtils.BlackListDirFilter.html  |   12 +-
 .../hadoop/hbase/util/FSUtils.DirFilter.html    |    4 +-
 .../hbase/util/FSUtils.FamilyDirFilter.html     |    8 +-
 .../hadoop/hbase/util/FSUtils.FileFilter.html   |    8 +-
 .../hadoop/hbase/util/FSUtils.HFileFilter.html  |    8 +-
 .../hbase/util/FSUtils.HFileLinkFilter.html     |    6 +-
 .../hbase/util/FSUtils.ReferenceFileFilter.html |    8 +-
 .../hbase/util/FSUtils.RegionDirFilter.html     |   10 +-
 .../hbase/util/FSUtils.UserTableDirFilter.html  |    6 +-
 .../org/apache/hadoop/hbase/util/FSUtils.html   |  252 +-
 .../apache/hadoop/hbase/util/package-tree.html  |    8 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |    2 +-
 .../org/apache/hadoop/hbase/Version.html        |    6 +-
 .../SplitLogWorker.TaskExecutor.Status.html     |  378 +-
 .../SplitLogWorker.TaskExecutor.html            |  378 +-
 .../hbase/regionserver/SplitLogWorker.html      |  378 +-
 .../hbase/snapshot/ExportSnapshot.Counter.html  | 2158 +++++------
 .../snapshot/ExportSnapshot.ExportMapper.html   | 2158 +++++------
 ...hotInputFormat.ExportSnapshotInputSplit.html | 2158 +++++------
 ...tInputFormat.ExportSnapshotRecordReader.html | 2158 +++++------
 ...xportSnapshot.ExportSnapshotInputFormat.html | 2158 +++++------
 .../hbase/snapshot/ExportSnapshot.Options.html  | 2158 +++++------
 .../hbase/snapshot/ExportSnapshot.Testing.html  | 2158 +++++------
 .../hadoop/hbase/snapshot/ExportSnapshot.html   | 2158 +++++------
 .../hbase/util/FSUtils.BlackListDirFilter.html  | 3427 +++++++++---------
 .../hadoop/hbase/util/FSUtils.DirFilter.html    | 3427 +++++++++---------
 .../hbase/util/FSUtils.FamilyDirFilter.html     | 3427 +++++++++---------
 .../hadoop/hbase/util/FSUtils.FileFilter.html   | 3427 +++++++++---------
 .../hadoop/hbase/util/FSUtils.HFileFilter.html  | 3427 +++++++++---------
 .../hbase/util/FSUtils.HFileLinkFilter.html     | 3427 +++++++++---------
 .../hbase/util/FSUtils.ReferenceFileFilter.html | 3427 +++++++++---------
 .../hbase/util/FSUtils.RegionDirFilter.html     | 3427 +++++++++---------
 .../hbase/util/FSUtils.UserTableDirFilter.html  | 3427 +++++++++---------
 .../org/apache/hadoop/hbase/util/FSUtils.html   | 3427 +++++++++---------
 downloads.html                                  |    4 +-
 export_control.html                             |    4 +-
 index.html                                      |    4 +-
 integration.html                                |    4 +-
 issue-tracking.html                             |    4 +-
 license.html                                    |    4 +-
 mail-lists.html                                 |    4 +-
 metrics.html                                    |    4 +-
 old_news.html                                   |    4 +-
 plugin-management.html                          |    4 +-
 plugins.html                                    |    4 +-
 poweredbyhbase.html                             |    4 +-
 project-info.html                               |    4 +-
 project-reports.html                            |    4 +-
 project-summary.html                            |    4 +-
 pseudo-distributed.html                         |    4 +-
 replication.html                                |    4 +-
 resources.html                                  |    4 +-
 source-repository.html                          |    4 +-
 sponsors.html                                   |    4 +-
 supportingprojects.html                         |    4 +-
 team-list.html                                  |    4 +-
 testdevapidocs/index-all.html                   |    2 +
 .../hadoop/hbase/io/hfile/package-tree.html     |    2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |   10 +-
 .../hadoop/hbase/regionserver/package-tree.html |    6 +-
 .../apache/hadoop/hbase/test/package-tree.html  |    2 +-
 .../apache/hadoop/hbase/util/TestFSUtils.html   |  100 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |    4 +-
 .../apache/hadoop/hbase/util/TestFSUtils.html   | 1085 +++---
 107 files changed, 29157 insertions(+), 28291 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index 673f227..3d348d2 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180517" />
+    <meta name="Date-Revision-yyyymmdd" content="20180518" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -601,7 +601,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-05-17</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-05-18</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index bb7d4d7..909dad9 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20180517142955+00'00')
-/CreationDate (D:20180517144455+00'00')
+/ModDate (D:20180518144444+00'00')
+/CreationDate (D:20180518144444+00'00')
 >>
 endobj
 2 0 obj
@@ -372585,35 +372585,35 @@ f
 f
 0.0 0.0 0.0 scn
 1.0 1.0 1.0 scn
-48.24 669.41 83.133 101.92 re
+48.24 657.42 83.133 113.91 re
 f
 0.0 0.0 0.0 scn
 1.0 1.0 1.0 scn
-131.373 669.41 41.5665 101.92 re
+131.373 657.42 41.5665 113.91 re
 f
 0.0 0.0 0.0 scn
 1.0 1.0 1.0 scn
-172.9395 669.41 124.7 101.92 re
+172.9395 657.42 124.7 113.91 re
 f
 0.0 0.0 0.0 scn
 1.0 1.0 1.0 scn
-297.6395 669.41 249.4005 101.92 re
+297.6395 657.42 249.4005 113.91 re
 f
 0.0 0.0 0.0 scn
 0.9765 0.9765 0.9765 scn
-48.24 555.5 83.133 113.91 re
+48.24 555.5 83.133 101.92 re
 f
 0.0 0.0 0.0 scn
 0.9765 0.9765 0.9765 scn
-131.373 555.5 41.5665 113.91 re
+131.373 555.5 41.5665 101.92 re
 f
 0.0 0.0 0.0 scn
 0.9765 0.9765 0.9765 scn
-172.9395 555.5 124.7 113.91 re
+172.9395 555.5 124.7 101.92 re
 f
 0.0 0.0 0.0 scn
 0.9765 0.9765 0.9765 scn
-297.6395 555.5 249.4005 113.91 re
+297.6395 555.5 249.4005 101.92 re
 f
 0.0 0.0 0.0 scn
 1.0 1.0 1.0 scn
@@ -372804,20 +372804,20 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-48.24 669.41 m
-131.373 669.41 l
+48.24 657.42 m
+131.373 657.42 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 48.24 771.58 m
-48.24 669.16 l
+48.24 657.17 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 131.373 771.58 m
-131.373 669.16 l
+131.373 657.17 l
 S
 [] 0 d
 1 w
@@ -372853,20 +372853,20 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-131.373 669.41 m
-172.9395 669.41 l
+131.373 657.42 m
+172.9395 657.42 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 131.373 771.58 m
-131.373 669.16 l
+131.373 657.17 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 172.9395 771.58 m
-172.9395 669.16 l
+172.9395 657.17 l
 S
 [] 0 d
 1 w
@@ -372888,20 +372888,20 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-172.9395 669.41 m
-297.6395 669.41 l
+172.9395 657.42 m
+297.6395 657.42 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 172.9395 771.58 m
-172.9395 669.16 l
+172.9395 657.17 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 297.6395 771.58 m
-297.6395 669.16 l
+297.6395 657.17 l
 S
 [] 0 d
 1 w
@@ -372911,21 +372911,21 @@ S
 BT
 175.9395 757.116 Td
 /F1.0 10.5 Tf
-<4372656174652061206e6577207461626c652c206f72> Tj
+<55706461746520616e206578697374696e67> Tj
 ET
 
 
 BT
 175.9395 742.836 Td
 /F1.0 10.5 Tf
-<7265706c61636520616e206578697374696e67> Tj
+<7461626c652077697468207468652070726f7669646564> Tj
 ET
 
 
 BT
 175.9395 728.556 Td
 /F1.0 10.5 Tf
-<7461626c65d57320736368656d61> Tj
+[<736368656d61206672> 20.0195 <61676d656e74>] TJ
 ET
 
 0.0 0.0 0.0 scn
@@ -372937,20 +372937,20 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-297.6395 669.41 m
-547.04 669.41 l
+297.6395 657.42 m
+547.04 657.42 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 297.6395 771.58 m
-297.6395 669.16 l
+297.6395 657.17 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
 547.04 771.58 m
-547.04 669.16 l
+547.04 657.17 l
 S
 [] 0 d
 1 w
@@ -373002,21 +373002,28 @@ ET
 BT
 300.6395 686.93 Td
 /F4.0 11 Tf
-<2f3e266c743b2f5461626c65536368656d613e27205c> Tj
+<4b4545505f44454c455445445f43454c4c533d227472756522> Tj
 ET
 
 
 BT
 300.6395 674.94 Td
 /F4.0 11 Tf
+<2f3e266c743b2f5461626c65536368656d613e27205c> Tj
+ET
+
+
+BT
+300.6395 662.95 Td
+/F4.0 11 Tf
 <ca2022687474703a2f2f6578616d706c652e636f6d3a383030302f75736572732f736368656d6122> Tj
 ET
 
 0.0 0.0 0.0 scn
 0.5 w
 0.8667 0.8667 0.8667 SCN
-48.24 669.41 m
-131.373 669.41 l
+48.24 657.42 m
+131.373 657.42 l
 S
 [] 0 d
 0.5 w
@@ -373027,13 +373034,13 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-48.24 669.66 m
+48.24 657.67 m
 48.24 555.25 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-131.373 669.66 m
+131.373 657.67 m
 131.373 555.25 l
 S
 [] 0 d
@@ -373042,21 +373049,21 @@ S
 0.6941 0.1294 0.2745 scn
 
 BT
-51.24 657.38 Td
+51.24 645.39 Td
 /F4.0 10.5 Tf
 <2f> Tj
 ET
 
 
 BT
-56.49 657.38 Td
+56.49 645.39 Td
 /F8.0 10.5 Tf
 <7461626c65> Tj
 ET
 
 
 BT
-82.74 657.38 Td
+82.74 645.39 Td
 /F4.0 10.5 Tf
 <2f736368656d61> Tj
 ET
@@ -373064,8 +373071,8 @@ ET
 0.0 0.0 0.0 scn
 0.5 w
 0.8667 0.8667 0.8667 SCN
-131.373 669.41 m
-172.9395 669.41 l
+131.373 657.42 m
+172.9395 657.42 l
 S
 [] 0 d
 0.5 w
@@ -373076,13 +373083,13 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-131.373 669.66 m
+131.373 657.67 m
 131.373 555.25 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-172.9395 669.66 m
+172.9395 657.67 m
 172.9395 555.25 l
 S
 [] 0 d
@@ -373091,7 +373098,7 @@ S
 0.6941 0.1294 0.2745 scn
 
 BT
-134.373 657.38 Td
+134.373 645.39 Td
 /F4.0 10.5 Tf
 <505554> Tj
 ET
@@ -373099,8 +373106,8 @@ ET
 0.0 0.0 0.0 scn
 0.5 w
 0.8667 0.8667 0.8667 SCN
-172.9395 669.41 m
-297.6395 669.41 l
+172.9395 657.42 m
+297.6395 657.42 l
 S
 [] 0 d
 0.5 w
@@ -373111,13 +373118,13 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-172.9395 669.66 m
+172.9395 657.67 m
 172.9395 555.25 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-297.6395 669.66 m
+297.6395 657.67 m
 297.6395 555.25 l
 S
 [] 0 d
@@ -373126,30 +373133,30 @@ S
 0.2 0.2 0.2 scn
 
 BT
-175.9395 655.196 Td
+175.9395 643.206 Td
 /F1.0 10.5 Tf
-<55706461746520616e206578697374696e67> Tj
+<4372656174652061206e6577207461626c652c206f72> Tj
 ET
 
 
 BT
-175.9395 640.916 Td
+175.9395 628.926 Td
 /F1.0 10.5 Tf
-<7461626c652077697468207468652070726f7669646564> Tj
+<7265706c61636520616e206578697374696e67> Tj
 ET
 
 
 BT
-175.9395 626.636 Td
+175.9395 614.646 Td
 /F1.0 10.5 Tf
-[<736368656d61206672> 20.0195 <61676d656e74>] TJ
+<7461626c65d57320736368656d61> Tj
 ET
 
 0.0 0.0 0.0 scn
 0.5 w
 0.8667 0.8667 0.8667 SCN
-297.6395 669.41 m
-547.04 669.41 l
+297.6395 657.42 m
+547.04 657.42 l
 S
 [] 0 d
 0.5 w
@@ -373160,13 +373167,13 @@ S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-297.6395 669.66 m
+297.6395 657.67 m
 297.6395 555.25 l
 S
 [] 0 d
 0.5 w
 0.8667 0.8667 0.8667 SCN
-547.04 669.66 m
+547.04 657.67 m
 547.04 555.25 l
 S
 [] 0 d
@@ -373175,51 +373182,44 @@ S
 0.2 0.2 0.2 scn
 
 BT
-300.6395 656.95 Td
-/F4.0 11 Tf
-<6375726c202d7669202d5820505554205c> Tj
-ET
-
-
-BT
 300.6395 644.96 Td
 /F4.0 11 Tf
-<ca202d4820224163636570743a20746578742f786d6c22205c> Tj
+<6375726c202d7669202d5820505554205c> Tj
 ET
 
 
 BT
 300.6395 632.97 Td
 /F4.0 11 Tf
-<ca202d482022436f6e74656e742d547970653a20746578742f786d6c22205c> Tj
+<ca202d4820224163636570743a20746578742f786d6c22205c> Tj
 ET
 
 
 BT
 300.6395 620.98 Td
 /F4.0 11 Tf
-<ca202d642027266c743b3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d> Tj
+<ca202d482022436f6e74656e742d547970653a20746578742f786d6c22205c> Tj
 ET
 
 
 BT
 300.6395 608.99 Td
 /F4.0 11 Tf
-<38223f3e266c743b5461626c65536368656d61> Tj
+<ca202d642027266c743b3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d> Tj
 ET
 
 
 BT
 300.6395 597.0 Td
 /F4.0 11 Tf
-<6e616d653d227573657273223e266c743b436f6c756d6e536368656d61206e616d653d22636622> Tj
+<38223f3e266c743b5461626c65536368656d61> Tj
 ET
 
 
 BT
 300.6395 585.01 Td
 /F4.0 11 Tf
-<4b4545505f44454c455445445f43454c4c533d227472756522> Tj
+<6e616d653d227573657273223e266c743b436f6c756d6e536368656d61206e616d653d22636622> Tj
 ET
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ead846d7/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html b/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
index 2965ee7..5c18280 100644
--- a/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
+++ b/apidocs/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.88">ExportSnapshot</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.92">ExportSnapshot</a>
 extends org.apache.hadoop.hbase.util.AbstractHBaseTool
 implements org.apache.hadoop.util.Tool</pre>
 <div class="block">Export the specified snapshot to a given FileSystem.
@@ -280,7 +280,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>NAME</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.89">NAME</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.93">NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.NAME">Constant Field Values</a></dd>
@@ -293,7 +293,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_SOURCE_PREFIX</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.91">CONF_SOURCE_PREFIX</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.95">CONF_SOURCE_PREFIX</a></pre>
 <div class="block">Configuration prefix for overrides for the source filesystem</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -307,7 +307,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>CONF_DEST_PREFIX</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.93">CONF_DEST_PREFIX</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.97">CONF_DEST_PREFIX</a></pre>
 <div class="block">Configuration prefix for overrides for the destination filesystem</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -321,7 +321,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CONF_SKIP_TMP</h4>
-<pre>protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.111">CONF_SKIP_TMP</a></pre>
+<pre>protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.115">CONF_SKIP_TMP</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.snapshot.ExportSnapshot.CONF_SKIP_TMP">Constant Field Values</a></dd>
@@ -342,7 +342,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ExportSnapshot</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.88">ExportSnapshot</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.92">ExportSnapshot</a>()</pre>
 </li>
 </ul>
 </li>
@@ -359,7 +359,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>processOptions</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.890">processOptions</a>(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine&nbsp;cmd)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.915">processOptions</a>(org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine&nbsp;cmd)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code>org.apache.hadoop.hbase.util.AbstractHBaseTool</code></span></div>
 <div class="block">This method is called to process the options after they have been parsed.</div>
 <dl>
@@ -374,7 +374,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>doWork</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.915">doWork</a>()
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.940">doWork</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Execute the export snapshot by copying the snapshot metadata, hfiles and wals.</div>
 <dl>
@@ -393,7 +393,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>printUsage</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1080">printUsage</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1108">printUsage</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code>printUsage</code>&nbsp;in class&nbsp;<code>org.apache.hadoop.hbase.util.AbstractHBaseTool</code></dd>
@@ -406,7 +406,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>addOptions</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1093">addOptions</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1121">addOptions</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code>org.apache.hadoop.hbase.util.AbstractHBaseTool</code></span></div>
 <div class="block">Override this to add command-line options using <code>AbstractHBaseTool.addOptWithArg(java.lang.String, java.lang.String)</code>
  and similar methods.</div>
@@ -422,7 +422,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>main</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1108">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/snapshot/ExportSnapshot.html#line.1136">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</pre>
 </li>
 </ul>
 </li>
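
[Editor's note] The apidocs hunks above describe ExportSnapshot as a public class that extends AbstractHBaseTool and implements org.apache.hadoop.util.Tool, with processOptions/doWork/main entry points. As a hedged illustration only (the snapshot name, destination URI, and mapper count below are placeholders, and the single-dash option spellings follow the usage shown in the HBase reference guide for this tool, not anything stated in this commit), a minimal Java driver could invoke it through ToolRunner roughly like this:

    // Sketch: run the ExportSnapshot tool programmatically, mirroring what its
    // main(String[]) does. All argument values here are illustrative placeholders.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
    import org.apache.hadoop.util.ToolRunner;

    public class ExportSnapshotExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // ToolRunner parses the arguments and hands them to the tool, which in turn
        // calls processOptions(...) and then doWork(), as documented above.
        int exitCode = ToolRunner.run(conf, new ExportSnapshot(), new String[] {
            "-snapshot", "MySnapshot",                        // snapshot to export (placeholder)
            "-copy-to", "hdfs://backup-cluster:8020/hbase",   // destination root dir (placeholder)
            "-mappers", "4"                                   // parallel copy tasks (placeholder)
        });
        System.exit(exitCode);
      }
    }

The same arguments can equally be passed on the command line to ExportSnapshot.main, which is the usual way the tool is driven; the programmatic form above is only meant to show how the Tool methods documented in the diff fit together.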