Posted to commits@hbase.apache.org by gi...@apache.org on 2018/12/05 14:52:55 UTC

[01/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e46798831 -> 275553168


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurr

<TRUNCATED>
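
Note on the lock-file helpers shown in the excerpt above: createLockRetryCounterFactory() and checkAndMarkRunningHbck() are public static and can be exercised directly. The snippet below is a minimal sketch, assuming only an HBase/Hadoop client classpath and the members visible in this excerpt; the wrapper class HbckLockProbe and its main() method are illustrative and are not part of the published source or of this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.RetryCounter;

public class HbckLockProbe {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Same retry policy the tool itself uses when creating hbase-hbck.lock.
    RetryCounter retries = HBaseFsck.createLockRetryCounterFactory(conf).create();
    // Attempt to create the lock file under the HBase temp dir (see getTmpDir above).
    Pair<Path, FSDataOutputStream> lock = HBaseFsck.checkAndMarkRunningHbck(conf, retries);
    if (lock.getSecond() == null) {
      // Stream is null when the lock could not be obtained, e.g. because it is
      // already held by another hbck run or written by an hbase-2.x Master.
      System.out.println("hbck lock not obtained; path=" + lock.getFirst());
    } else {
      System.out.println("hbck lock obtained at " + lock.getFirst());
      lock.getSecond().close();
    }
  }
}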

-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class=

<TRUNCATED>
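
For readers following the hbck lock-file logic rendered above, here is a minimal sketch (illustrative only, not part of this commit) of how checkAndMarkRunningHbck(...) is typically driven: it returns the lock path plus an open stream, and a null stream means the lock file already exists. The class name HbckLockSketch and the 5-attempt/1-second retry policy are assumptions made for the example.

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.RetryCounterFactory;

public class HbckLockSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Retry policy for creating the lock file under the hbase tmp dir;
    // 5 attempts with a 1s sleep are assumed values, not HBaseFsck defaults.
    RetryCounterFactory retries = new RetryCounterFactory(5, 1000);
    Pair<Path, FSDataOutputStream> lock =
        HBaseFsck.checkAndMarkRunningHbck(conf, retries.create());
    if (lock.getSecond() == null) {
      // Creation failed because the lock file already exists: another hbck run
      // (or an hbase-2.x Master writing the hbck1 lock described above) holds it.
      System.err.println("hbck lock " + lock.getFirst() + " is held elsewhere; exiting");
      return;
    }
    try {
      // ... exclusive repair work goes here ...
    } finally {
      // Closing the stream releases our handle on the lock file.
      IOUtils.closeQuietly(lock.getSecond());
    }
  }
}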

[49/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/checkstyle.rss
----------------------------------------------------------------------
diff --git a/checkstyle.rss b/checkstyle.rss
index 8560113..195a4f2 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
     <language>en-us</language>
     <copyright>&#169;2007 - 2018 The Apache Software Foundation</copyright>
     <item>
-      <title>File: 3816,
-             Errors: 14791,
+      <title>File: 3817,
+             Errors: 14743,
              Warnings: 0,
              Infos: 0
       </title>
@@ -8651,7 +8651,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  8
+                  7
                 </td>
               </tr>
                           <tr>
@@ -14727,7 +14727,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  74
+                  73
                 </td>
               </tr>
                           <tr>
@@ -16449,7 +16449,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  2
+                  1
                 </td>
               </tr>
                           <tr>
@@ -18717,7 +18717,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  4
+                  3
                 </td>
               </tr>
                           <tr>
@@ -24359,7 +24359,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  1
+                  0
                 </td>
               </tr>
                           <tr>
@@ -29082,6 +29082,20 @@ under the License.
               </tr>
                           <tr>
                 <td>
+                  <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.rsgroup.TestUtility.java">org/apache/hadoop/hbase/rsgroup/TestUtility.java</a>
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+                <td>
+                  0
+                </td>
+              </tr>
+                          <tr>
+                <td>
                   <a href="http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.MetricsRegionAggregateSourceImpl.java">org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java</a>
                 </td>
                 <td>
@@ -39927,7 +39941,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  41
+                  0
                 </td>
               </tr>
                           <tr>
@@ -51911,7 +51925,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  104
+                  102
                 </td>
               </tr>
                           <tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index 5f9e984..241b964 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -385,7 +385,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index 5da6a60..0afcaff 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -450,7 +450,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index d5278d1..ea79107 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -680,7 +680,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index c9d26d1..529e258 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -323,7 +323,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index ed71234..2a28e09 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -1009,7 +1009,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 975521e..ead3e4b 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3831,7 +3831,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Mon Dec  3 14:44:16 UTC 2018"</code></td>
+<td class="colLast"><code>"Wed Dec  5 14:44:22 UTC 2018"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
@@ -3845,7 +3845,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"6b524fd5dc892868017c9a7a944df62c"</code></td>
+<td class="colLast"><code>"888b7c7a3be5b9ed5222c3804dd3faa1"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 3c5751e..e353c06 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -6596,15 +6596,15 @@
 </dd>
 <dt><a href="org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a> - Enum in <a href="org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-org.apache.hadoop.conf.Configuration-">blockUntilAvailable(ZKWatcher, long, Configuration)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-org.apache.hadoop.conf.Configuration-">blockUntilAvailable(ZKWatcher, long, Configuration)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Wait until the primary meta region is available.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">blockUntilAvailable(ZKWatcher, long)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">blockUntilAvailable(ZKWatcher, long)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">blockUntilAvailable(ZKWatcher, int, long)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">blockUntilAvailable(ZKWatcher, int, long)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
 </dd>
@@ -24942,11 +24942,11 @@
 <dd>
 <div class="block">Deletes merge qualifiers for the specified merged region.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">deleteMetaLocation(ZKWatcher)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">deleteMetaLocation(ZKWatcher)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Deletes the location of <code>hbase:meta</code> in ZooKeeper.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">deleteMetaLocation(ZKWatcher, int)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">deleteMetaLocation(ZKWatcher, int)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HBaseFsck.html#deleteMetaRegion-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">deleteMetaRegion(HBaseFsck.HbckInfo)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></dt>
 <dd>
@@ -36318,7 +36318,7 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html#getCachedBlockStatsByFile--">getCachedBlockStatsByFile()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/BlockCacheUtil.CachedBlocksByFile.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheUtil.CachedBlocksByFile</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection(ClusterConnection, ServerName)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/Utility.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection(ClusterConnection, ServerName)</a></span> - Static method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/Utility.html" title="class in org.apache.hadoop.hbase.rsgroup">Utility</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/LruBlockCache.html#getCachedFileNamesForTest--">getCachedFileNamesForTest()</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a></dt>
 <dd>
@@ -42003,7 +42003,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/MetaTableAccessor.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos(List&lt;Pair&lt;RegionInfo, ServerName&gt;&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/MetaTableAccessor.html" title="class in org.apache.hadoop.hbase">MetaTableAccessor</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos(List&lt;Pair&lt;RegionInfo, ServerName&gt;&gt;)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos(List&lt;Pair&lt;RegionInfo, ServerName&gt;&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/ReplicationTracker.html#getListOfRegionServers--">getListOfRegionServers()</a></span> - Method in interface org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/ReplicationTracker.html" title="interface in org.apache.hadoop.hbase.replication">ReplicationTracker</a></dt>
 <dd>
@@ -43318,30 +43318,28 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/ZKAsyncRegistry.html#getMetaRegionLocation--">getMetaRegionLocation()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/ZKAsyncRegistry.html" title="class in org.apache.hadoop.hbase.client">ZKAsyncRegistry</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionLocation(ZKWatcher)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionLocation(ZKWatcher)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Gets the meta region location, if available.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionLocation(ZKWatcher, int)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionLocation(ZKWatcher, int)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Gets the meta region location, if available.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegions(ZKWatcher)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegions(ZKWatcher)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Gets the meta regions for the given path with the default replica ID.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegions(ZKWatcher, int)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegions(ZKWatcher, int)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Gets the meta regions for the given path and replica ID.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations(ZKWatcher)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations(ZKWatcher)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations(ZKWatcher, int)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations(ZKWatcher, int)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HBaseFsck.html#getMetaRegionServerName-int-">getMetaRegionServerName(int)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/assignment/AssignmentManager.html#getMetaRegionSet--">getMetaRegionSet()</a></span> - Method in class org.apache.hadoop.hbase.master.assignment.<a href="org/apache/hadoop/hbase/master/assignment/AssignmentManager.html" title="class in org.apache.hadoop.hbase.master.assignment">AssignmentManager</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionState-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionState(ZKWatcher)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
@@ -43368,20 +43366,11 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html#getMetaScannerCaching--">getMetaScannerCaching()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/AsyncConnectionConfiguration.html" title="class in org.apache.hadoop.hbase.client">AsyncConnectionConfiguration</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection(ClusterConnection, ZKWatcher, long, int)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/Utility.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection(ClusterConnection, ZKWatcher, long, int)</a></span> - Static method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/Utility.html" title="class in org.apache.hadoop.hbase.rsgroup">Utility</a></dt>
 <dd>
 <div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
  specified timeout for availability.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableLocator--">getMetaTableLocator()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#getMetaTableLocator--">getMetaTableLocator()</a></span> - Method in class org.apache.hadoop.hbase.replication.regionserver.<a href="org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html" title="class in org.apache.hadoop.hbase.replication.regionserver">ReplicationSyncUp.DummyServer</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator()</a></span> - Method in interface org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></dt>
-<dd>
-<div class="block">Returns instance of <a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server.</div>
-</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/HMaster.html#getMetaTableObserver--">getMetaTableObserver()</a></span> - Method in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableObserver--">getMetaTableObserver()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>
@@ -64113,7 +64102,7 @@
 <div class="block">Given an InetAddress, checks to see if the address is a local address, by comparing the address
  with all the interfaces on the node.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#isLocationAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">isLocationAvailable(ZKWatcher)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#isLocationAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">isLocationAvailable(ZKWatcher)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Checks if the meta region location is available.</div>
 </dd>
@@ -70157,6 +70146,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html#LOG">LOG</a></span> - Variable in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html" title="class in org.apache.hadoop.hbase.rsgroup">RSGroupInfoManagerImpl.ServerEventsListenerThread</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/Utility.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/Utility.html" title="class in org.apache.hadoop.hbase.rsgroup">Utility</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/ScheduledChore.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.html#LOG">LOG</a></span> - Static variable in class org.apache.hadoop.hbase.security.<a href="org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.html" title="class in org.apache.hadoop.hbase.security">AbstractHBaseSaslRpcClient</a></dt>
@@ -74333,12 +74324,10 @@
 <dd>
 <div class="block">Table descriptor for <code>hbase:meta</code> catalog table</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html#metaTableLocator">metaTableLocator</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>
-<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><span class="typeNameLink">MetaTableLocator</span></a> - Class in <a href="org/apache/hadoop/hbase/zookeeper/package-summary.html">org.apache.hadoop.hbase.zookeeper</a></dt>
 <dd>
-<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
- which keeps hbase:meta region server location.</div>
+<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper which
+ keeps hbase:meta region server location.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#MetaTableLocator--">MetaTableLocator()</a></span> - Constructor for class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>&nbsp;</dd>
@@ -111103,10 +111092,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/JvmPauseMonitor.html#stop--">stop()</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/JvmPauseMonitor.html" title="class in org.apache.hadoop.hbase.util">JvmPauseMonitor</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#stop--">stop()</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
-<dd>
-<div class="block">Stop working.</div>
-</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.html#stop--">stop()</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKNodeTracker</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/metrics2/impl/JmxCacheBuster.html#stop--">stop()</a></span> - Static method in class org.apache.hadoop.metrics2.impl.<a href="org/apache/hadoop/metrics2/impl/JmxCacheBuster.html" title="class in org.apache.hadoop.metrics2.impl">JmxCacheBuster</a></dt>
@@ -111216,8 +111201,6 @@ service.</div>
 <dd>
 <div class="block">Flag for stopping the server</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#stopped">stopped</a></span> - Variable in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.html#stopped">stopped</a></span> - Variable in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKNodeTracker</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/metrics2/impl/JmxCacheBuster.html#stopped">stopped</a></span> - Static variable in class org.apache.hadoop.metrics2.impl.<a href="org/apache/hadoop/metrics2/impl/JmxCacheBuster.html" title="class in org.apache.hadoop.metrics2.impl">JmxCacheBuster</a></dt>
@@ -122304,11 +122287,11 @@ the order they are declared.</div>
 <div class="block">Perform the validation checks for a coprocessor to determine if the path
  is white listed or not.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation(ClusterConnection, ZKWatcher, long)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation(ClusterConnection, ZKWatcher, long)</a></span> - Static method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/Utility.html" title="class in org.apache.hadoop.hbase.rsgroup">Utility</a></dt>
 <dd>
 <div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation(ClusterConnection, ZKWatcher, long, int)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation(ClusterConnection, ZKWatcher, long, int)</a></span> - Static method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/Utility.html" title="class in org.apache.hadoop.hbase.rsgroup">Utility</a></dt>
 <dd>
 <div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
 </dd>
@@ -122325,7 +122308,7 @@ the order they are declared.</div>
 <dd>
 <div class="block">Verify that the regionInfo is valid</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation(ClusterConnection, AdminProtos.AdminService.BlockingInterface, ServerName, byte[])</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/Utility.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation(ClusterConnection, AdminProtos.AdminService.BlockingInterface, ServerName, byte[])</a></span> - Static method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/Utility.html" title="class in org.apache.hadoop.hbase.rsgroup">Utility</a></dt>
 <dd>
 <div class="block">Verify we can connect to <code>hostingServer</code> and that its carrying
  <code>regionName</code>.</div>
@@ -123016,19 +122999,15 @@ the order they are declared.</div>
 <dd>
 <div class="block">Wait until AM finishes the meta loading, i.e, the region states rebuilding.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">waitMetaRegionLocation(ZKWatcher, long)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
-<dd>
-<div class="block">Gets the meta region location, if available, and waits for up to the
- specified timeout if not immediately available.</div>
-</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">waitMetaRegionLocation(ZKWatcher, int, long)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">waitMetaRegionLocation(ZKWatcher, long)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
 <div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
  immediately available.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">waitMetaRegionLocation(ZKWatcher)</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">waitMetaRegionLocation(ZKWatcher, int, long)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></dt>
 <dd>
-<div class="block">Waits indefinitely for availability of <code>hbase:meta</code>.</div>
+<div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
+ immediately available.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html#waitMetaRegions-org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv-">waitMetaRegions(MasterProcedureEnv)</a></span> - Static method in class org.apache.hadoop.hbase.master.procedure.<a href="org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html" title="class in org.apache.hadoop.hbase.master.procedure">ProcedureSyncWait</a></dt>
 <dd>&nbsp;</dd>
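
A minimal, hypothetical sketch of caller code for the MetaTableLocator entries re-indexed above, now that the lookup methods are static and no locator instance (or stop() call) is needed; the class name MetaLocationSketch, the inline no-op Abortable, and the 30-second timeout are assumptions for illustration, not code from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public class MetaLocationSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // A do-nothing Abortable, assumed sufficient for a short-lived lookup tool.
    Abortable noopAbortable = new Abortable() {
      @Override public void abort(String why, Throwable e) { }
      @Override public boolean isAborted() { return false; }
    };
    try (ZKWatcher zkw = new ZKWatcher(conf, "meta-location-sketch", noopAbortable)) {
      // Static call: returns null if hbase:meta has no location in ZooKeeper yet.
      ServerName current = MetaTableLocator.getMetaRegionLocation(zkw);
      System.out.println("hbase:meta currently at: " + current);
      // Static call: waits up to the given timeout (ms) for the location to appear.
      ServerName located = MetaTableLocator.waitMetaRegionLocation(zkw, 30_000);
      System.out.println("hbase:meta located at: " + located);
    }
  }
}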

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/Server.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/Server.html b/devapidocs/org/apache/hadoop/hbase/Server.html
index f7faed2d..77c95a6 100644
--- a/devapidocs/org/apache/hadoop/hbase/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/Server.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":6,"i10":18};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":18,"i7":6,"i8":6,"i9":18};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public interface <a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.37">Server</a>
+public interface <a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.34">Server</a>
 extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="interface in org.apache.hadoop.hbase">Abortable</a>, <a href="../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></pre>
 <div class="block">Defines a curated set of shared functions implemented by HBase servers (Masters
  and RegionServers). For use internally only. Be judicious adding API. Changes cause ripples
@@ -174,23 +174,16 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i7" class="rowColor">
-<td class="colFirst"><code><a href="../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a></span>()</code>
-<div class="block">Returns instance of <a href="../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server.</div>
-</td>
-</tr>
-<tr id="i8" class="altColor">
 <td class="colFirst"><code><a href="../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/Server.html#getServerName--">getServerName</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i8" class="altColor">
 <td class="colFirst"><code><a href="../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/Server.html#getZooKeeper--">getZooKeeper</a></span>()</code>
 <div class="block">Gets the ZooKeeper instance for this server.</div>
 </td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>default boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></span>()</code>&nbsp;</td>
 </tr>
@@ -229,7 +222,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getConfiguration</h4>
-<pre>org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.41">getConfiguration</a>()</pre>
+<pre>org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.38">getConfiguration</a>()</pre>
 <div class="block">Gets the configuration object for this server.</div>
 </li>
 </ul>
@@ -239,7 +232,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getZooKeeper</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.46">getZooKeeper</a>()</pre>
+<pre><a href="../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.43">getZooKeeper</a>()</pre>
 <div class="block">Gets the ZooKeeper instance for this server.</div>
 </li>
 </ul>
@@ -249,7 +242,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getConnection</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.54">getConnection</a>()</pre>
+<pre><a href="../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.51">getConnection</a>()</pre>
 <div class="block">Returns a reference to the servers' connection.
 
  Important note: this method returns a reference to Connection which is managed
@@ -262,7 +255,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>createConnection</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.56">createConnection</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre><a href="../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.53">createConnection</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -276,36 +269,20 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterConnection</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.64">getClusterConnection</a>()</pre>
+<pre><a href="../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.61">getClusterConnection</a>()</pre>
 <div class="block">Returns a reference to the servers' cluster connection. Prefer <a href="../../../../org/apache/hadoop/hbase/Server.html#getConnection--"><code>getConnection()</code></a>.
 
  Important note: this method returns a reference to Connection which is managed
  by Server itself, so callers must NOT attempt to close connection obtained.</div>
 </li>
 </ul>
-<a name="getMetaTableLocator--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>getMetaTableLocator</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.72">getMetaTableLocator</a>()</pre>
-<div class="block">Returns instance of <a href="../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server. This MetaServerLocator is started and stopped by server, clients
- shouldn't manage it's lifecycle.</div>
-<dl>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>instance of <a href="../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a> associated with this server.</dd>
-</dl>
-</li>
-</ul>
 <a name="getServerName--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerName</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.77">getServerName</a>()</pre>
+<pre><a href="../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.66">getServerName</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>The unique server name for this server.</dd>
@@ -318,7 +295,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getCoordinatedStateManager</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/CoordinatedStateManager.html" title="interface in org.apache.hadoop.hbase">CoordinatedStateManager</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.82">getCoordinatedStateManager</a>()</pre>
+<pre><a href="../../../../org/apache/hadoop/hbase/CoordinatedStateManager.html" title="interface in org.apache.hadoop.hbase">CoordinatedStateManager</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.71">getCoordinatedStateManager</a>()</pre>
 <div class="block">Get CoordinatedStateManager instance for this server.</div>
 </li>
 </ul>
@@ -328,7 +305,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getChoreService</h4>
-<pre><a href="../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase">ChoreService</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.87">getChoreService</a>()</pre>
+<pre><a href="../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase">ChoreService</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.76">getChoreService</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>The <a href="../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase"><code>ChoreService</code></a> instance for this server</dd>
@@ -341,7 +318,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileSystem</h4>
-<pre>default&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.94">getFileSystem</a>()</pre>
+<pre>default&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.83">getFileSystem</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Return the FileSystem object used (can return null!).</dd>
@@ -354,7 +331,7 @@ extends <a href="../../../../org/apache/hadoop/hbase/Abortable.html" title="inte
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isStopping</h4>
-<pre>default&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.112">isStopping</a>()</pre>
+<pre>default&nbsp;boolean&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Server.html#line.101">isStopping</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
<dd>True if the server is Stopping</dd>

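A note on the Server.html hunks above: the commit drops Server#getMetaTableLocator(), and the remaining Connection accessors keep their documented contract: getConnection() returns the connection managed by the Server itself (callers must not close it), while createConnection(Configuration) returns a caller-owned connection and declares IOException. Below is a minimal sketch of a caller honouring that contract; the class and method names are illustrative only and are not part of the published site.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;

// Illustrative caller of the Server interface; not part of the commit.
final class ServerConnectionExample {
  private ServerConnectionExample() {
  }

  static void useConnections(Server server, Configuration conf) throws IOException {
    // Server-managed connection: use it, but never close it.
    Connection shared = server.getConnection();
    try (Table meta = shared.getTable(TableName.META_TABLE_NAME)) {
      // ... read from hbase:meta through the shared connection ...
    }

    // Caller-owned connection: close it when done, e.g. via try-with-resources.
    try (Connection own = server.createConnection(conf)) {
      // ... work that needs its own connection lifecycle ...
    }
  }
}
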
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 19c67e5..2cd8945 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html b/devapidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
index d84319e..c822478 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/NotAllMetaRegionsOnlineException.html
@@ -103,7 +103,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       int&nbsp;replicaId,
                       long&nbsp;timeout)</code>
@@ -112,11 +112,11 @@
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       long&nbsp;timeout)</code>
-<div class="block">Gets the meta region location, if available, and waits for up to the
- specified timeout if not immediately available.</div>
+<div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
+ immediately available.</div>
 </td>
 </tr>
 </tbody>

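This class-use page lists MetaTableLocator.waitMetaRegionLocation, which the commit turns from an instance method into a static one. A rough sketch of waiting for hbase:meta with a bounded timeout follows; the wrapper name is made up, and the broad throws clause is used only because the full signature is not reproduced on this page (NotAllMetaRegionsOnlineException is the case this page documents).

import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

// Illustrative wrapper: waits up to 30 seconds for hbase:meta to be assigned.
final class WaitForMetaExample {
  private WaitForMetaExample() {
  }

  // 'throws Exception' is deliberately broad; the exact checked exceptions of
  // waitMetaRegionLocation are not shown in this diff.
  static ServerName waitForMeta(ZKWatcher zkw) throws Exception {
    try {
      return MetaTableLocator.waitMetaRegionLocation(zkw, 30000L);
    } catch (NotAllMetaRegionsOnlineException e) {
      // hbase:meta did not come online within the timeout.
      return null;
    }
  }
}
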
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 55dd46a..0ffda07 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -6260,26 +6260,41 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                   <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">RSGroupBasedLoadBalancer.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.html#regionOnline-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.hbase.ServerName-">regionOnline</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
             <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">RSGroupInfoManagerImpl.ServerEventsListenerThread.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html#serverAdded-org.apache.hadoop.hbase.ServerName-">serverAdded</a></span>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">RSGroupInfoManagerImpl.FailedOpenUpdaterThread.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html#serverAdded-org.apache.hadoop.hbase.ServerName-">serverAdded</a></span>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">RSGroupInfoManagerImpl.ServerEventsListenerThread.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html#serverRemoved-org.apache.hadoop.hbase.ServerName-">serverRemoved</a></span>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><span class="typeNameLabel">RSGroupInfoManagerImpl.FailedOpenUpdaterThread.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html#serverRemoved-org.apache.hadoop.hbase.ServerName-">serverRemoved</a></span>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName)</code>&nbsp;</td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static boolean</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
+                    <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
+                    byte[]&nbsp;regionName)</code>
+<div class="block">Verify we can connect to <code>hostingServer</code> and that its carrying
+ <code>regionName</code>.</div>
+</td>
+</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
@@ -6759,19 +6774,15 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
-<td class="colLast"><span class="typeNameLabel">HBaseFsck.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getMetaRegionServerName-int-">getMetaRegionServerName</a></span>(int&nbsp;replicaId)</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">RegionMover.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/RegionMover.html#getServerNameForRegion-org.apache.hadoop.hbase.client.RegionInfo-">getServerNameForRegion</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;region)</code>
 <div class="block">Get servername that is up in hbase:meta hosting the given region.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">RegionMover.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/RegionMover.html#getTargetServer--">getTargetServer</a></span>()</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">RegionMover.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/RegionMover.html#stripServer-java.util.List-java.lang.String-int-">stripServer</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;regionServers,
            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
@@ -7081,7 +7092,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">blockUntilAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    int&nbsp;replicaId,
                    long&nbsp;timeout)</code>
@@ -7089,7 +7100,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">blockUntilAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    long&nbsp;timeout)</code>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
@@ -7114,13 +7125,13 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Gets the meta region location, if available.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                      int&nbsp;replicaId)</code>
 <div class="block">Gets the meta region location, if available.</div>
@@ -7131,7 +7142,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <td class="colLast"><span class="typeNameLabel">ZKServerTool.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKServerTool.html#readZKNodes-org.apache.hadoop.conf.Configuration-">readZKNodes</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       int&nbsp;replicaId,
                       long&nbsp;timeout)</code>
@@ -7140,11 +7151,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       long&nbsp;timeout)</code>
-<div class="block">Gets the meta region location, if available, and waits for up to the
- specified timeout if not immediately available.</div>
+<div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
+ immediately available.</div>
 </td>
 </tr>
 </tbody>
@@ -7157,7 +7168,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-org.apache.hadoop.conf.Configuration-">blockUntilAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    long&nbsp;timeout,
                    org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
@@ -7165,11 +7176,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                           int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
@@ -7191,11 +7202,6 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                   <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">MasterAddressTracker.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.html#setMasterAddress-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-org.apache.hadoop.hbase.ServerName-int-">setMasterAddress</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
@@ -7206,7 +7212,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
  path.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-int-org.apache.hadoop.hbase.master.RegionState.State-">setMetaLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
@@ -7215,7 +7221,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 <div class="block">Sets the location of <code>hbase:meta</code> in ZooKeeper to the specified server address.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-org.apache.hadoop.hbase.master.RegionState.State-">setMetaLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
@@ -7224,21 +7230,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
  specified server address.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static byte[]</code></td>
 <td class="colLast"><span class="typeNameLabel">MasterAddressTracker.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.html#toByteArray-org.apache.hadoop.hbase.ServerName-int-">toByteArray</a></span>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn,
            int&nbsp;infoPort)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>private boolean</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
-                    <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
-                    byte[]&nbsp;regionName)</code>
-<div class="block">Verify we can connect to <code>hostingServer</code> and that its carrying
- <code>regionName</code>.</div>
-</td>
-</tr>
 </tbody>
 </table>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
@@ -7249,7 +7245,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</code>&nbsp;</td>
 </tr>
 </tbody>

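The rows above show MetaTableLocator's lookup helpers (blockUntilAvailable, getMetaRegionLocation, getMetaRegionsAndLocations, waitMetaRegionLocation) becoming static, while the private getCachedConnection and verifyRegionLocation helpers move to the rsgroup Utility class. A small sketch of a lookup once the methods are static; the class name is illustrative, and the null-when-unavailable behaviour is an assumption read off the "if available" wording in the method summaries.

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

// Illustrative only: a non-blocking lookup via the now-static helper.
final class MetaLookupExample {
  private MetaLookupExample() {
  }

  // 'throws Exception' is deliberately broad; the throws clause is not shown here.
  static String describeMetaLocation(ZKWatcher zkw) throws Exception {
    ServerName sn = MetaTableLocator.getMetaRegionLocation(zkw);
    // Assumed: a null result means no location is currently published in ZooKeeper.
    return sn == null ? "hbase:meta location not yet published" : "hbase:meta is on " + sn;
  }
}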

[03/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>

<TRUNCATED>
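
A minimal sketch of how the lock-file helpers shown in the excerpt above (createLockRetryCounterFactory and checkAndMarkRunningHbck) might be driven from a standalone tool. Only members visible in the source above are used; the class name DemoHbckLock, the placeholder work section, and the explicit cleanup in the finally block are illustrative assumptions, not part of HBaseFsck itself.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.RetryCounter;

/** Hypothetical driver; assumes an hbase-site.xml pointing at the target cluster is on the classpath. */
public class DemoHbckLock {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();

    // The factory reads hbase.hbck.lockfile.attempts and the related sleep-interval
    // keys shown in createLockRetryCounterFactory above.
    RetryCounter retryCounter = HBaseFsck.createLockRetryCounterFactory(conf).create();

    // Attempts to create the hbck lock file under the temp dir returned by getTmpDir.
    // The stream half of the pair is null when the file already exists, i.e. another
    // hbck instance (or an hbase-2.x Master, per the note written into the file) holds the lock.
    Pair<Path, FSDataOutputStream> lock = HBaseFsck.checkAndMarkRunningHbck(conf, retryCounter);
    if (lock.getSecond() == null) {
      System.err.println("hbck lock is already held; exiting.");
      return;
    }
    try {
      // ... perform work while holding the lock ...
    } finally {
      // Close the stream and delete the lock file so later runs are not blocked.
      lock.getSecond().close();
      FileSystem fs = lock.getFirst().getFileSystem(conf);
      fs.delete(lock.getFirst(), false);
    }
  }
}

Note that checkAndMarkRunningHbck itself bounds the wait with hbase.hbck.lockfile.maxwaittime by running the FileLockCallable on a single-thread executor and cancelling the future on timeout, so a caller following this sketch would not need its own timeout handling.
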

[44/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
index 9e51da1..a620cbf 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
@@ -471,7 +471,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/Server.html" title="inte
 <!--   -->
 </a>
 <h3>Methods inherited from interface&nbsp;org.apache.hadoop.hbase.<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getServerName--">getServerName</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getZooKeeper--">getZooKeeper</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getServerName--">getServerName</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getZooKeeper--">getZooKeeper</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.Abortable">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/MasterStatusServlet.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterStatusServlet.html b/devapidocs/org/apache/hadoop/hbase/master/MasterStatusServlet.html
index a837a46..0dafaa4 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterStatusServlet.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterStatusServlet.html
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.42">MasterStatusServlet</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.40">MasterStatusServlet</a>
 extends javax.servlet.http.HttpServlet</pre>
 <div class="block">The servlet responsible for rendering the index page of the
  master.</div>
@@ -242,7 +242,7 @@ extends javax.servlet.http.HttpServlet</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.43">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.41">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.master.MasterStatusServlet.serialVersionUID">Constant Field Values</a></dd>
@@ -263,7 +263,7 @@ extends javax.servlet.http.HttpServlet</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MasterStatusServlet</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.42">MasterStatusServlet</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.40">MasterStatusServlet</a>()</pre>
 </li>
 </ul>
 </li>
@@ -280,7 +280,7 @@ extends javax.servlet.http.HttpServlet</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>doGet</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.46">doGet</a>(javax.servlet.http.HttpServletRequest&nbsp;request,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.44">doGet</a>(javax.servlet.http.HttpServletRequest&nbsp;request,
                   javax.servlet.http.HttpServletResponse&nbsp;response)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -297,7 +297,7 @@ extends javax.servlet.http.HttpServlet</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaLocationOrNull</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.84">getMetaLocationOrNull</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.82">getMetaLocationOrNull</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master)</pre>
 </li>
 </ul>
 <a name="getFragmentationInfo-org.apache.hadoop.hbase.master.HMaster-org.apache.hadoop.conf.Configuration-">
@@ -306,7 +306,7 @@ extends javax.servlet.http.HttpServlet</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getFragmentationInfo</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.90">getFragmentationInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html#line.86">getFragmentationInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;master,
                                                  org.apache.hadoop.conf.Configuration&nbsp;conf)
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index c03b1ea..9b187b9 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -347,10 +347,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 35a3477..86fe2b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -216,8 +216,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
index 4b549da..7b435d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
@@ -127,8 +127,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/TaskMonitor.TaskFilter.TaskType.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">TaskMonitor.TaskFilter.TaskType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/TaskMonitor.TaskFilter.TaskType.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">TaskMonitor.TaskFilter.TaskType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 7fecd9d..0fa5060 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -432,19 +432,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index f6e32b6..0d9bea0 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 2357779..1815591 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index b76dcda..b8d0e70 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1730">HRegionServer.CompactionChecker</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1719">HRegionServer.CompactionChecker</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></pre>
 </li>
 </ul>
@@ -233,7 +233,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>instance</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1731">instance</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1720">instance</a></pre>
 </li>
 </ul>
 <a name="majorCompactPriority">
@@ -242,7 +242,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>majorCompactPriority</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1732">majorCompactPriority</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1721">majorCompactPriority</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_PRIORITY">
@@ -251,7 +251,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PRIORITY</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1733">DEFAULT_PRIORITY</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1722">DEFAULT_PRIORITY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.CompactionChecker.DEFAULT_PRIORITY">Constant Field Values</a></dd>
@@ -264,7 +264,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>iteration</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1736">iteration</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1725">iteration</a></pre>
 </li>
 </ul>
 </li>
@@ -281,7 +281,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CompactionChecker</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1738">CompactionChecker</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;h,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1727">CompactionChecker</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;h,
                   int&nbsp;sleepTime,
                   <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a>&nbsp;stopper)</pre>
 </li>
@@ -300,7 +300,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1752">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1741">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index da8e035..5d4def3 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3445">HRegionServer.MovedRegionInfo</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3429">HRegionServer.MovedRegionInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -218,7 +218,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>serverName</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3446">serverName</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3430">serverName</a></pre>
 </li>
 </ul>
 <a name="seqNum">
@@ -227,7 +227,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>seqNum</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3447">seqNum</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3431">seqNum</a></pre>
 </li>
 </ul>
 <a name="ts">
@@ -236,7 +236,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ts</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3448">ts</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3432">ts</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MovedRegionInfo</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3450">MovedRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3434">MovedRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                        long&nbsp;closeSeqNum)</pre>
 </li>
 </ul>
@@ -271,7 +271,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3456">getServerName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3440">getServerName</a>()</pre>
 </li>
 </ul>
 <a name="getSeqNum--">
@@ -280,7 +280,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getSeqNum</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3460">getSeqNum</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3444">getSeqNum</a>()</pre>
 </li>
 </ul>
 <a name="getMoveTime--">
@@ -289,7 +289,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMoveTime</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3464">getMoveTime</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3448">getMoveTime</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 20c585f..2a00361 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>protected static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3532">HRegionServer.MovedRegionsCleaner</a>
+<pre>protected static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3516">HRegionServer.MovedRegionsCleaner</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></pre>
 <div class="block">Creates a Chore thread to clean the moved region cache.</div>
@@ -242,7 +242,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>regionServer</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3533">regionServer</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3517">regionServer</a></pre>
 </li>
 </ul>
 <a name="stoppable">
@@ -251,7 +251,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>stoppable</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3534">stoppable</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3518">stoppable</a></pre>
 </li>
 </ul>
 </li>
@@ -268,7 +268,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MovedRegionsCleaner</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3536">MovedRegionsCleaner</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;regionServer,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3520">MovedRegionsCleaner</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;regionServer,
                             <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a>&nbsp;stoppable)</pre>
 </li>
 </ul>
@@ -286,7 +286,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>create</h4>
-<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionsCleaner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3544">create</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;rs)</pre>
+<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionsCleaner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3528">create</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;rs)</pre>
 </li>
 </ul>
 <a name="chore--">
@@ -295,7 +295,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3555">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3539">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>
@@ -310,7 +310,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3560">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3544">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-">Stoppable</a></code></span></div>
 <div class="block">Stop this service.
  Implementers should favor logging errors over throwing RuntimeExceptions.</div>
@@ -328,7 +328,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isStopped</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3565">isStopped</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3549">isStopped</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html#isStopped--">isStopped</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></code></dd>
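
(Editorial note on the chore pattern documented above: MovedRegionsCleaner both extends ScheduledChore, overriding chore(), and implements Stoppable, providing stop(String) and isStopped(). The following is a minimal sketch of that same pattern only — not the HBase class itself. The (name, stopper, period) super-constructor and the evictExpiredEntries() helper are assumptions for illustration.)

import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

// Hypothetical chore following the MovedRegionsCleaner pattern: a
// ScheduledChore that is also Stoppable, so it can be shut down
// independently of the server that owns it.
public class CacheCleanerChore extends ScheduledChore implements Stoppable {
  private volatile boolean stopped = false;

  CacheCleanerChore(Stoppable stopper, int periodMillis) {
    // Assumes ScheduledChore's (name, stopper, period) constructor.
    super("CacheCleanerChore", stopper, periodMillis);
  }

  @Override
  protected void chore() {
    // The task to execute on each scheduled run of the chore;
    // here it would evict expired entries from some cache.
    if (!stopped) {
      // evictExpiredEntries();  // hypothetical helper
    }
  }

  @Override
  public void stop(String why) {
    // Per the Stoppable contract shown above, favor logging over throwing.
    stopped = true;
  }

  @Override
  public boolean isStopped() {
    return stopped;
  }
}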

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index a2b0120..fce5824 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1792">HRegionServer.PeriodicMemStoreFlusher</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1781">HRegionServer.PeriodicMemStoreFlusher</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></pre>
 </li>
 </ul>
@@ -232,7 +232,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>server</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1793">server</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1782">server</a></pre>
 </li>
 </ul>
 <a name="RANGE_OF_DELAY">
@@ -241,7 +241,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>RANGE_OF_DELAY</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1794">RANGE_OF_DELAY</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1783">RANGE_OF_DELAY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.PeriodicMemStoreFlusher.RANGE_OF_DELAY">Constant Field Values</a></dd>
@@ -254,7 +254,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>MIN_DELAY_TIME</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1795">MIN_DELAY_TIME</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1784">MIN_DELAY_TIME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.PeriodicMemStoreFlusher.MIN_DELAY_TIME">Constant Field Values</a></dd>
@@ -267,7 +267,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>rangeOfDelay</h4>
-<pre>final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1797">rangeOfDelay</a></pre>
+<pre>final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1786">rangeOfDelay</a></pre>
 </li>
 </ul>
 </li>
@@ -284,7 +284,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PeriodicMemStoreFlusher</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1798">PeriodicMemStoreFlusher</a>(int&nbsp;cacheFlushInterval,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1787">PeriodicMemStoreFlusher</a>(int&nbsp;cacheFlushInterval,
                                <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;server)</pre>
 </li>
 </ul>
@@ -302,7 +302,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1807">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1796">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>
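
(Editorial note: the PeriodicMemStoreFlusher members above — MIN_DELAY_TIME, RANGE_OF_DELAY, rangeOfDelay — suggest the usual jitter pattern: each run waits a minimum delay plus a random offset so flushes across regions do not stampede. A hedged sketch of that calculation follows; the constant values are placeholders, not HBase's actual settings.)

import java.util.concurrent.ThreadLocalRandom;

final class FlushJitter {
  // Placeholder values; the real constants live in
  // HRegionServer.PeriodicMemStoreFlusher.
  static final int MIN_DELAY_TIME = 0;              // milliseconds
  static final int RANGE_OF_DELAY = 5 * 60 * 1000;  // milliseconds

  /** Returns a randomized delay in [MIN_DELAY_TIME, MIN_DELAY_TIME + RANGE_OF_DELAY). */
  static long nextDelayMillis() {
    return MIN_DELAY_TIME + ThreadLocalRandom.current().nextInt(RANGE_OF_DELAY);
  }

  private FlushJitter() {}
}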

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
index 7e272c9..ff60765 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3836">HRegionServer.SystemExitWhenAbortTimeout</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3820">HRegionServer.SystemExitWhenAbortTimeout</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/util/TimerTask.html?is-external=true" title="class or interface in java.util">TimerTask</a></pre>
 <div class="block">Force to terminate region server when abort timeout.</div>
 </li>
@@ -199,7 +199,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/util/TimerTask.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SystemExitWhenAbortTimeout</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3838">SystemExitWhenAbortTimeout</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3822">SystemExitWhenAbortTimeout</a>()</pre>
 </li>
 </ul>
 </li>
@@ -216,7 +216,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/util/TimerTask.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3842">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3826">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
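
(Editorial note: SystemExitWhenAbortTimeout above is a java.util.TimerTask whose run() forcibly terminates the region server when an abort does not finish in time. Below is a rough, hypothetical sketch of that watchdog pattern; the class name, method names, and the use of Runtime.halt are assumptions, not taken from the HBase source.)

import java.util.Timer;
import java.util.TimerTask;

// Illustration only: arm a watchdog when an abort starts; if the task
// fires before the process has exited, terminate the JVM so a hung
// shutdown cannot wedge the server indefinitely.
final class AbortWatchdog {
  private final Timer timer = new Timer("abort-watchdog", true); // daemon thread

  void armAbortTimeout(long timeoutMillis) {
    timer.schedule(new TimerTask() {
      @Override
      public void run() {
        // Runtime.halt skips shutdown hooks, mirroring a forced exit.
        Runtime.getRuntime().halt(1);
      }
    }, timeoutMillis);
  }
}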


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index e1d46ba..ce593d3 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -291,10 +291,10 @@
 <th><img src="images/icon_warning_sml.gif" alt="" />&#160;Warnings</th>
 <th><img src="images/icon_error_sml.gif" alt="" />&#160;Errors</th></tr>
 <tr class="b">
-<td>3816</td>
+<td>3817</td>
 <td>0</td>
 <td>0</td>
-<td>14791</td></tr></table></div>
+<td>14743</td></tr></table></div>
 <div class="section">
 <h2><a name="Files"></a>Files</h2>
 <table border="0" class="table table-striped">
@@ -669,170 +669,160 @@
 <td>0</td>
 <td>5</td></tr>
 <tr class="a">
-<td><a href="#org.apache.hadoop.hbase.Server.java">org/apache/hadoop/hbase/Server.java</a></td>
-<td>0</td>
-<td>0</td>
-<td>1</td></tr>
-<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ServerLoad.java">org/apache/hadoop/hbase/ServerLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.ServerName.java">org/apache/hadoop/hbase/ServerName.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.SplitLogCounters.java">org/apache/hadoop/hbase/SplitLogCounters.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.SplitLogTask.java">org/apache/hadoop/hbase/SplitLogTask.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.StripeCompactionsPerformanceEvaluation.java">org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TableDescriptors.java">org/apache/hadoop/hbase/TableDescriptors.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TableInfoMissingException.java">org/apache/hadoop/hbase/TableInfoMissingException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TableName.java">org/apache/hadoop/hbase/TableName.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TableNotDisabledException.java">org/apache/hadoop/hbase/TableNotDisabledException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TableNotEnabledException.java">org/apache/hadoop/hbase/TableNotEnabledException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TableNotFoundException.java">org/apache/hadoop/hbase/TableNotFoundException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TagType.java">org/apache/hadoop/hbase/TagType.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestCellUtil.java">org/apache/hadoop/hbase/TestCellUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestClassFinder.java">org/apache/hadoop/hbase/TestClassFinder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestClientClusterStatus.java">org/apache/hadoop/hbase/TestClientClusterStatus.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestClientOperationTimeout.java">org/apache/hadoop/hbase/TestClientOperationTimeout.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestClusterPortAssignment.java">org/apache/hadoop/hbase/TestClusterPortAssignment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestFSTableDescriptorForceCreation.java">org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestGlobalMemStoreSize.java">org/apache/hadoop/hbase/TestGlobalMemStoreSize.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestHBaseConfiguration.java">org/apache/hadoop/hbase/TestHBaseConfiguration.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestHBaseTestingUtility.java">org/apache/hadoop/hbase/TestHBaseTestingUtility.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestHColumnDescriptor.java">org/apache/hadoop/hbase/TestHColumnDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestHColumnDescriptorDefaultVersions.java">org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestHTableDescriptor.java">org/apache/hadoop/hbase/TestHTableDescriptor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestIOFencing.java">org/apache/hadoop/hbase/TestIOFencing.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestInfoServers.java">org/apache/hadoop/hbase/TestInfoServers.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestJMXConnectorServer.java">org/apache/hadoop/hbase/TestJMXConnectorServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestKeyValue.java">org/apache/hadoop/hbase/TestKeyValue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.TestLocalHBaseCluster.java">org/apache/hadoop/hbase/TestLocalHBaseCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
-<td><a href="#org.apache.hadoop.hbase.TestMetaTableAccessor.java">org/apache/hadoop/hbase/TestMetaTableAccessor.java</a></td>
-<td>0</td>
-<td>0</td>
-<td>8</td></tr>
 <tr class="b">
-<td><a href="#org.apache.hadoop.hbase.TestMetaTableAccessorNoCluster.java">org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java</a></td>
+<td><a href="#org.apache.hadoop.hbase.TestMetaTableAccessor.java">org/apache/hadoop/hbase/TestMetaTableAccessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
 <tr class="a">
-<td><a href="#org.apache.hadoop.hbase.TestMetaTableLocator.java">org/apache/hadoop/hbase/TestMetaTableLocator.java</a></td>
+<td><a href="#org.apache.hadoop.hbase.TestMetaTableAccessorNoCluster.java">org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>41</td></tr>
+<td>7</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.TestMovedRegionsCleaner.java">org/apache/hadoop/hbase/TestMovedRegionsCleaner.java</a></td>
 <td>0</td>
@@ -4602,7 +4592,7 @@
 <td><a href="#org.apache.hadoop.hbase.master.MasterStatusServlet.java">org/apache/hadoop/hbase/master/MasterStatusServlet.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>4</td></tr>
+<td>3</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.master.MasterWalManager.java">org/apache/hadoop/hbase/master/MasterWalManager.java</a></td>
 <td>0</td>
@@ -5487,7 +5477,7 @@
 <td><a href="#org.apache.hadoop.hbase.procedure.flush.MasterFlushTableProcedureManager.java">org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>2</td></tr>
+<td>1</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.procedure.flush.RegionServerFlushTableProcedureManager.java">org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java</a></td>
 <td>0</td>
@@ -6182,7 +6172,7 @@
 <td><a href="#org.apache.hadoop.hbase.regionserver.HRegionServer.java">org/apache/hadoop/hbase/regionserver/HRegionServer.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>74</td></tr>
+<td>73</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HRegionServerCommandLine.java">org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java</a></td>
 <td>0</td>
@@ -9027,7 +9017,7 @@
 <td><a href="#org.apache.hadoop.hbase.util.HBaseFsck.java">org/apache/hadoop/hbase/util/HBaseFsck.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>104</td></tr>
+<td>102</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.util.HBaseFsckRepair.java">org/apache/hadoop/hbase/util/HBaseFsckRepair.java</a></td>
 <td>0</td>
@@ -9674,7 +9664,7 @@
 <tr class="a">
 <td>blocks</td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_blocks.html#EmptyBlock">EmptyBlock</a></td>
-<td>41</td>
+<td>40</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td></td>
@@ -9684,12 +9674,12 @@
 <tr class="a">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces">NeedBraces</a></td>
-<td>1760</td>
+<td>1756</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td>coding</td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_coding.html#EmptyStatement">EmptyStatement</a></td>
-<td>30</td>
+<td>29</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td></td>
@@ -9751,7 +9741,7 @@
 <li>sortStaticImportsAlphabetically: <tt>&quot;true&quot;</tt></li>
 <li>groups: <tt>&quot;*,org.apache.hbase.thirdparty,org.apache.hadoop.hbase.shaded&quot;</tt></li>
 <li>option: <tt>&quot;top&quot;</tt></li></ul></td>
-<td>1131</td>
+<td>1128</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td></td>
@@ -9763,7 +9753,7 @@
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_imports.html#UnusedImports">UnusedImports</a>
 <ul>
 <li>processJavadoc: <tt>&quot;true&quot;</tt></li></ul></td>
-<td>95</td>
+<td>94</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td>indentation</td>
@@ -9774,19 +9764,19 @@
 <li>caseIndent: <tt>&quot;2&quot;</tt></li>
 <li>basicOffset: <tt>&quot;2&quot;</tt></li>
 <li>lineWrappingIndentation: <tt>&quot;2&quot;</tt></li></ul></td>
-<td>4663</td>
+<td>4649</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td>javadoc</td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation">JavadocTagContinuationIndentation</a>
 <ul>
 <li>offset: <tt>&quot;2&quot;</tt></li></ul></td>
-<td>730</td>
+<td>728</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription">NonEmptyAtclauseDescription</a></td>
-<td>3436</td>
+<td>3417</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td>misc</td>
@@ -9804,7 +9794,7 @@
 <ul>
 <li>max: <tt>&quot;100&quot;</tt></li>
 <li>ignorePattern: <tt>&quot;^package.*|^import.*|a href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated&quot;</tt></li></ul></td>
-<td>1429</td>
+<td>1426</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td></td>
@@ -17320,21 +17310,6 @@
 <td>'if' construct must use '{}'s.</td>
 <td>274</td></tr></table></div>
 <div class="section">
-<h3 id="org.apache.hadoop.hbase.Server.java">org/apache/hadoop/hbase/Server.java</h3>
-<table border="0" class="table table-striped">
-<tr class="a">
-<th>Severity</th>
-<th>Category</th>
-<th>Rule</th>
-<th>Message</th>
-<th>Line</th></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>imports</td>
-<td>ImportOrder</td>
-<td>Wrong order for 'java.io.IOException' import.</td>
-<td>29</td></tr></table></div>
-<div class="section">
 <h3 id="org.apache.hadoop.hbase.ServerLoad.java">org/apache/hadoop/hbase/ServerLoad.java</h3>
 <table border="0" class="table table-striped">
 <tr class="a">
@@ -18323,353 +18298,92 @@
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>251</td></tr>
+<td>287</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>289</td></tr>
+<td>310</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>312</td></tr>
+<td>322</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>324</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 103).</td>
-<td>704</td></tr></table></div>
+<td>702</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestMetaTableAccessorNoCluster.java">org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 105).</td>
 <td>116</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>126</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>127</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'(' has incorrect indentation level 6, expected level should be 8.</td>
 <td>194</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>MethodParamPad</td>
 <td>'(' should be on the previous line.</td>
 <td>194</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 122).</td>
 <td>205</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>blocks</td>
-<td>NeedBraces</td>
-<td>'if' construct must use '{}'s.</td>
-<td>212</td></tr></table></div>
-<div class="section">
-<h3 id="org.apache.hadoop.hbase.TestMetaTableLocator.java">org/apache/hadoop/hbase/TestMetaTableLocator.java</h3>
-<table border="0" class="table table-striped">
 <tr class="a">
-<th>Severity</th>
-<th>Category</th>
-<th>Rule</th>
-<th>Message</th>
-<th>Line</th></tr>
-<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>129</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>150</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>151</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>152</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>155</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'method call' child has incorrect indentation level 4, expected level should be 6.</td>
-<td>160</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>blocks</td>
-<td>NeedBraces</td>
-<td>'while' construct must use '{}'s.</td>
-<td>176</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>186</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>210</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>211</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>212</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>213</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>217</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>230</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>231</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>232</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>233</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>237</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>243</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>244</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>245</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>246</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>250</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'method def' child has incorrect indentation level 8, expected level should be 4.</td>
-<td>260</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'method def' child has incorrect indentation level 8, expected level should be 4.</td>
-<td>261</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'method def' child has incorrect indentation level 8, expected level should be 4.</td>
-<td>263</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>276</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>282</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>283</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>284</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>287</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>JavadocTagContinuationIndentation</td>
-<td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>306</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>sizes</td>
-<td>LineLength</td>
-<td>Line is longer than 100 characters (found 109).</td>
-<td>309</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>sizes</td>
-<td>LineLength</td>
-<td>Line is longer than 100 characters (found 103).</td>
-<td>310</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>JavadocTagContinuationIndentation</td>
-<td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>311</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>sizes</td>
-<td>LineLength</td>
-<td>Line is longer than 100 characters (found 112).</td>
-<td>312</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
-<td>At-clause should have a non-empty description.</td>
-<td>315</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>indentation</td>
-<td>Indentation</td>
-<td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>319</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>blocks</td>
-<td>EmptyBlock</td>
-<td>Must have at least one statement.</td>
-<td>346</td></tr>
-<tr class="a">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>blocks</td>
-<td>NeedBraces</td>
-<td>'while' construct must use '{}'s.</td>
-<td>375</td></tr>
-<tr class="b">
-<td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>coding</td>
-<td>EmptyStatement</td>
-<td>Empty statement.</td>
-<td>375</td></tr></table></div>
+<td>212</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestMovedRegionsCleaner.java">org/apache/hadoop/hbase/TestMovedRegionsCleaner.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>77</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -18678,31 +18392,31 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestMultiVersions.java">org/apache/hadoop/hbase/TestMultiVersions.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>84</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'object def rcurly' has incorrect indentation level 5, expected level should be one of the following: 4, 6.</td>
 <td>112</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>197</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>coding</td>
 <td>InnerAssignment</td>
@@ -18711,25 +18425,25 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestNamespace.java">org/apache/hadoop/hbase/TestNamespace.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 105).</td>
 <td>208</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 105).</td>
 <td>338</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
@@ -18738,13 +18452,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestNodeHealthCheckChore.java">org/apache/hadoop/hbase/TestNodeHealthCheckChore.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
@@ -18753,151 +18467,151 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestPartialResultsFromClientSide.java">org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>142</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>177</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>210</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>248</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>321</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>341</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>375</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>396</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>404</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>407</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>457</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>559</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>573</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>654</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>655</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>656</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>665</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>683</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>684</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>685</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>689</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>695</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>696</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -18906,73 +18620,73 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestPerformanceEvaluation.java">org/apache/hadoop/hbase/TestPerformanceEvaluation.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>AvoidStarImport</td>
 <td>Using the '.*' form of import should be avoided - org.junit.Assert.*.</td>
 <td>20</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>75</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>88</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>173</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
 <td>Should use uppercase 'L'.</td>
 <td>199</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
 <td>Should use uppercase 'L'.</td>
 <td>199</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
 <td>Should use uppercase 'L'.</td>
 <td>199</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
 <td>Should use uppercase 'L'.</td>
 <td>200</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
 <td>Should use uppercase 'L'.</td>
 <td>200</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
 <td>Should use uppercase 'L'.</td>
 <td>202</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>UpperEll</td>
@@ -18981,49 +18695,49 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestRegionRebalancing.java">org/apache/hadoop/hbase/TestRegionRebalancing.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization lcurly' has incorrect indentation level 12, expected level should be one of the following: 10, 25, 27.</td>
 <td>70</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>103</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>104</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>108</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>188</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>217</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -19032,13 +18746,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestSerialization.java">org/apache/hadoop/hbase/TestSerialization.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -19047,67 +18761,67 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestServerSideScanMetricsFromClientSide.java">org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>Javadoc comment at column 0 has parse error. Details: no viable alternative at input '   *' while parsing JAVADOC_TAG</td>
 <td>117</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>142</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
 <td>215</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>321</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
 <td>322</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>327</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>328</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>329</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>330</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>EmptyBlock</td>
@@ -19116,25 +18830,25 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestStochasticBalancerJmxMetrics.java">org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>202</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>211</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
@@ -19143,73 +18857,73 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestTimeout.java">org/apache/hadoop/hbase/TestTimeout.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>UnusedImports</td>
 <td>Unused import - org.apache.hadoop.hbase.util.Threads.</td>
 <td>21</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def modifier' has incorrect indentation level 4, expected level should be 2.</td>
 <td>34</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
 <td>36</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def rcurly' has incorrect indentation level 4, expected level should be 2.</td>
 <td>37</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def modifier' has incorrect indentation level 4, expected level should be 2.</td>
 <td>43</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
 <td>46</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>EmptyBlock</td>
 <td>Must have at least one statement.</td>
 <td>49</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
 <td>53</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'while' has incorrect indentation level 6, expected level should be 4.</td>
 <td>54</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>EmptyBlock</td>
 <td>Must have at least one statement.</td>
 <td>54</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -19218,91 +18932,91 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TimestampTestBase.java">org/apache/hadoop/hbase/TimestampTestBase.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.hbase.client.Durability' import.</td>
 <td>30</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>design</td>
 <td>HideUtilityClassConstructor</td>
 <td>Utility classes should not have a public or default constructor.</td>
 <td>39</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>61</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>107</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>127</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>178</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>203</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>EmptyBlock</td>
 <td>Must have at least one statement.</td>
 <td>209</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>231</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>236</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>250</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>262</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
 <td>267</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -19311,13 +19025,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.UnknownRegionException.java">org/apache/hadoop/hbase/UnknownRegionException.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19326,19 +19040,19 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.Waiter.java">org/apache/hadoop/hbase/Waiter.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>coding</td>
 <td>InnerAssignment</td>
 <td>Inner assignments should be avoided.</td>
 <td>191</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>coding</td>
 <td>InnerAssignment</td>
@@ -19347,19 +19061,19 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.ZNodeClearer.java">org/apache/hadoop/hbase/ZNodeClearer.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>108</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
@@ -19368,13 +19082,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.BackupDriver.java">org/apache/hadoop/hbase/backup/BackupDriver.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19383,13 +19097,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.FailedArchiveException.java">org/apache/hadoop/hbase/backup/FailedArchiveException.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19398,85 +19112,85 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.HFileArchiver.java">org/apache/hadoop/hbase/backup/HFileArchiver.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>design</td>
 <td>FinalClass</td>
 <td>Class HFileArchiver should be declared as final.</td>
 <td>54</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def modifier' has incorrect indentation level 8, expected level should be one of the following: 4, 6.</td>
 <td>64</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 10, expected level should be one of the following: 6, 8.</td>
 <td>66</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def rcurly' has incorrect indentation level 8, expected level should be one of the following: 4, 6.</td>
 <td>67</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'object def rcurly' has incorrect indentation level 6, expected level should be one of the following: 2, 4.</td>
 <td>68</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>90</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>244</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>559</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>565</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>582</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>584</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>620</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
@@ -19485,13 +19199,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.TestBackupMerge.java">org/apache/hadoop/hbase/backup/TestBackupMerge.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19500,31 +19214,31 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.example.HFileArchiveManager.java">org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.hbase.client.Connection' import.</td>
 <td>26</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 115).</td>
 <td>71</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
 <td>78</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -19533,37 +19247,37 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.example.LongTermArchivingHFileCleaner.java">org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.conf.Configuration' import.</td>
 <td>23</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>56</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>63</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
 <td>72</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
@@ -19572,49 +19286,49 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.example.TableHFileArchiveTracker.java">org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.conf.Configuration' import.</td>
 <td>25</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.hbase.zookeeper.ZKListener' import.</td>
 <td>28</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>design</td>
 <td>FinalClass</td>
 <td>Class TableHFileArchiveTracker should be declared as final.</td>
 <td>40</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>70</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>93</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>137</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
@@ -19623,61 +19337,61 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.example.TestZooKeeperTableArchiveClient.java">org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>272</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>273</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>296</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 103).</td>
 <td>299</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>EmptyBlock</td>
 <td>Must have at least one statement.</td>
 <td>349</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>377</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
 <td>402</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>429</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -19686,19 +19400,19 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.example.ZKTableArchiveClient.java">org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.conf.Configuration' import.</td>
 <td>24</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -19707,13 +19421,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.impl.BackupCommands.java">org/apache/hadoop/hbase/backup/impl/BackupCommands.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19722,13 +19436,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.impl.BackupManager.java">org/apache/hadoop/hbase/backup/impl/BackupManager.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19737,13 +19451,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.backup.impl.BackupSystemTable.java">org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19752,37 +19466,37 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.Action.java">org/apache/hadoop/hbase/chaos/actions/Action.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 134).</td>
 <td>278</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 142).</td>
 <td>280</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 141).</td>
 <td>282</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 134).</td>
 <td>308</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
@@ -19791,13 +19505,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.ChangeCompressionAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeCompressionAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19806,13 +19520,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.ChangeEncodingAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeEncodingAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19821,13 +19535,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.ChangeSplitPolicyAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeSplitPolicyAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19836,13 +19550,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.ChangeVersionsAction.java">org/apache/hadoop/hbase/chaos/actions/ChangeVersionsAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -19851,13 +19565,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.FlushRandomRegionOfTableAction.java">org/apache/hadoop/hbase/chaos/actions/FlushRandomRegionOfTableAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -19866,13 +19580,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.RestartRandomDataNodeAction.java">org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -19881,127 +19595,127 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.RollingBatchRestartRsAction.java">org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
 <td>35</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'case' child has incorrect indentation level 6, expected level should be 8.</td>
 <td>84</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 9, expected level should be 10.</td>
 <td>85</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try' has incorrect indentation level 8, expected level should be 10.</td>
 <td>86</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try' child has incorrect indentation level 10, expected level should be 12.</td>
 <td>87</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try rcurly' has incorrect indentation level 8, expected level should be 10.</td>
 <td>88</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'catch' child has incorrect indentation level 10, expected level should be 12.</td>
 <td>91</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'catch rcurly' has incorrect indentation level 8, expected level should be 10.</td>
 <td>92</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 8, expected level should be 10.</td>
 <td>93</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 8, expected level should be 10.</td>
 <td>94</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'case' child has incorrect indentation level 6, expected level should be 8.</td>
 <td>95</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try' has incorrect indentation level 8, expected level should be 10.</td>
 <td>96</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try' child has incorrect indentation level 10, expected level should be 12.</td>
 <td>97</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try' child has incorrect indentation level 10, expected level should be 12.</td>
 <td>98</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'try rcurly' has incorrect indentation level 8, expected level should be 10.</td>
 <td>99</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'catch' child has incorrect indentation level 10, expected level should be 12.</td>
 <td>102</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'catch rcurly' has incorrect indentation level 8, expected level should be 10.</td>
 <td>103</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 8, expected level should be 10.</td>
 <td>104</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
 <td>117</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
@@ -20010,13 +19724,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.SplitAllRegionOfTableAction.java">org/apache/hadoop/hbase/chaos/actions/SplitAllRegionOfTableAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -20025,13 +19739,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.actions.TruncateTableAction.java">org/apache/hadoop/hbase/chaos/actions/TruncateTableAction.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -20040,37 +19754,37 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.factories.MasterKillingMonkeyFactory.java">org/apache/hadoop/hbase/chaos/factories/MasterKillingMonkeyFactory.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>45</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>50</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
 <td>60</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
 <td>63</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -20079,13 +19793,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.factories.MobNoKillMonkeyFactory.java">org/apache/hadoop/hbase/chaos/factories/MobNoKillMonkeyFactory.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>AvoidStarImport</td>
@@ -20094,151 +19808,151 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.factories.MobSlowDeterministicMonkeyFactory.java">org/apache/hadoop/hbase/chaos/factories/MobSlowDeterministicMonkeyFactory.java</h3>
 <table border="0" class="table table-striped">
-<tr class="a">
+<tr class="b">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>AvoidStarImport</td>
 <td>Using the '.*' form of import should be avoided - org.apache.hadoop.hbase.chaos.actions.*.</td>
 <td>21</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>58</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>59</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>60</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>61</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>62</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>63</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>70</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>71</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>72</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>73</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>74</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>75</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>76</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>77</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>78</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>83</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>85</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>86</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>87</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>88</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>89</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 12, expected level should be 6.</td>
 <td>91</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
@@ -20247,13 +19961,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.factories.MonkeyConstants.java">org/apache/hadoop/hbase/chaos/factories/MonkeyConstants.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>design</td>
 <td>InterfaceIsType</td>
@@ -20262,13 +19976,13 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.factories.MonkeyFactory.java">org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>imports</td>
 <td>ImportOrder</td>
@@ -20277,115 +19991,115 @@
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.chaos.factories.NoKillMonkeyFactory.java">org/apache/hadoop/hbase/chaos/factories/NoKillMonkeyFactory.java</h3>
 <table border="0" class="table table-striped">
-<tr class="b">
+<tr class="a">
 <th>Severity</th>
 <th>Category</th>
 <th>Rule</th>
 <th>Message</th>
 <th>Line</th></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>50</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>51</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>53</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>54</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>55</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>59</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>60</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>61</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>62</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>63</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>64</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>65</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>66</td></tr>
-<tr class="b">
+<tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'array initialization' child has incorrect indentation level 8, expected level should be 6.</td>
 <td>67</td></tr>
-<tr class="a">
+<tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td

<TRUNCATED>
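
The report rows above repeat a small set of checkstyle rules (UpperEll, NeedBraces, NonEmptyAtclauseDescription, Indentation, LineLength, and similar). For readers unfamiliar with what those rules flag, a minimal, self-contained Java illustration of the two most frequent ones follows; it is not taken from the HBase sources, only an example of the patterns the checker reports.

// Illustrative only; not HBase code. Shows what UpperEll and NeedBraces flag.
public final class CheckstyleExamples {
  // UpperEll: a lowercase 'l' suffix reads like the digit 1, so the rule
  // requires an uppercase 'L' on long literals.
  static final long BAD_LAG = 60000l;    // flagged: "Should use uppercase 'L'."
  static final long GOOD_LAG = 60000L;   // compliant

  // NeedBraces: even single-statement 'if' bodies must use '{}'.
  static long lag(boolean strict) {
    if (!strict) return BAD_LAG;         // flagged: "'if' construct must use '{}'s."
    if (strict) {                        // compliant
      return GOOD_LAG;
    }
    return GOOD_LAG;
  }

  private CheckstyleExamples() {         // HideUtilityClassConstructor: keep the ctor private
  }
}

Running checkstyle over a file like this produces rows of the same shape as the report above: severity, category, rule, message, and line number.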

[15/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFi

<TRUNCATED>
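
The source above ends inside unlockHbck(), which closes and deletes the lock file that FileLockCallable created. The locking scheme is simple: create the HBCK_LOCK_FILE under the cluster's tmp directory with overwrite disabled, retry ordinary I/O failures according to the hbase.hbck.lockfile.* retry settings, and treat "file already being created" as the lock being held by someone else. The standalone sketch below shows the same create-exclusively-or-back-off pattern on a local filesystem; LocalHbckLock, its constants and the /tmp path are illustrative stand-ins, not the FSUtils/RetryCounter plumbing HBaseFsck actually uses.

  import java.io.IOException;
  import java.nio.charset.StandardCharsets;
  import java.nio.file.FileAlreadyExistsException;
  import java.nio.file.Files;
  import java.nio.file.Path;
  import java.nio.file.Paths;
  import java.nio.file.StandardOpenOption;

  public class LocalHbckLock {
    // Illustrative equivalents of hbase.hbck.lockfile.attempts / attempt.sleep.interval.
    static final int MAX_ATTEMPTS = 5;
    static final long SLEEP_BETWEEN_ATTEMPTS_MS = 200;

    /** @return the lock path on success, or null when another instance already holds the lock. */
    static Path acquire(Path lockFile) throws IOException, InterruptedException {
      IOException last = null;
      for (int attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
        try {
          // CREATE_NEW fails if the file exists, mirroring FSUtils.create(..., overwrite=false).
          Files.write(lockFile, "hbck lock".getBytes(StandardCharsets.UTF_8),
              StandardOpenOption.CREATE_NEW);
          return lockFile;
        } catch (FileAlreadyExistsException held) {
          return null; // like the AlreadyBeingCreatedException branch: someone else owns the lock
        } catch (IOException ioe) {
          last = ioe;
          Thread.sleep(SLEEP_BETWEEN_ATTEMPTS_MS); // stands in for RetryCounter.sleepUntilNextRetry()
        }
      }
      throw last;
    }

    public static void main(String[] args) throws Exception {
      Path lock = acquire(Paths.get("/tmp/example-hbck.lock"));
      if (lock == null) {
        System.err.println("another hbck-like process already holds the lock");
        return;
      }
      try {
        System.out.println("holding " + lock);
      } finally {
        Files.deleteIfExists(lock); // the unlockHbck() analogue
      }
    }
  }

In HBaseFsck itself the same idea runs inside a FutureTask bounded by hbase.hbck.lockfile.maxwaittime, so a slow or hung filesystem cannot stall checkAndMarkRunningHbck() indefinitely.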

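Earlier in the same listing, unsupportedOptionsInV2 enumerates the 1.x-era repair flags that are no longer supported when hbck runs against an HBase 2.0+ cluster. Enforcing that is just a guard over the argument list; the sketch below is a hypothetical standalone version (UnsupportedOptionGuard and checkArgs are not the real HBaseFsck names), though it reuses the same flag strings.

  import java.util.Arrays;
  import java.util.HashSet;
  import java.util.Set;

  public class UnsupportedOptionGuard {
    // The repair flags rejected by hbck in HBase 2.0+, copied from unsupportedOptionsInV2.
    private static final Set<String> UNSUPPORTED = new HashSet<>(Arrays.asList(
        "-fix", "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans",
        "-fixTableOrphans", "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents",
        "-removeParents", "-fixEmptyMetaCells", "-repair", "-repairHoles",
        "-maxOverlapsToSideline", "-maxMerge"));

    /** @return true when every argument is acceptable, false if a repair flag was passed. */
    static boolean checkArgs(String[] args) {
      for (String arg : args) {
        if (UNSUPPORTED.contains(arg)) {
          System.err.println(arg + " is not supported against an HBase 2.x cluster");
          return false;
        }
      }
      return true;
    }

    public static void main(String[] args) {
      if (!checkArgs(args)) {
        System.exit(1); // refuse to run, as hbck does
      }
      System.out.println("arguments accepted");
    }
  }
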
[40/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
index 03f5fdc..3efb403 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3899">HBaseFsck.OnlineEntry</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3893">HBaseFsck.OnlineEntry</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Stores the regioninfo retrieved from Online region servers.</div>
 </li>
@@ -206,7 +206,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hri</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3900">hri</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3894">hri</a></pre>
 </li>
 </ul>
 <a name="hsa">
@@ -215,7 +215,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hsa</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3901">hsa</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3895">hsa</a></pre>
 </li>
 </ul>
 </li>
@@ -232,7 +232,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>OnlineEntry</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3899">OnlineEntry</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3893">OnlineEntry</a>()</pre>
 </li>
 </ul>
 </li>
@@ -249,7 +249,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3904">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3898">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index 47b9a9e..0be63ac 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4208">HBaseFsck.PrintingErrorReporter</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4202">HBaseFsck.PrintingErrorReporter</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></pre>
 </li>
@@ -301,7 +301,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>errorCount</h4>
-<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4209">errorCount</a></pre>
+<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4203">errorCount</a></pre>
 </li>
 </ul>
 <a name="showProgress">
@@ -310,7 +310,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>showProgress</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4210">showProgress</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4204">showProgress</a></pre>
 </li>
 </ul>
 <a name="progressThreshold">
@@ -319,7 +319,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>progressThreshold</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4212">progressThreshold</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4206">progressThreshold</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.PrintingErrorReporter.progressThreshold">Constant Field Values</a></dd>
@@ -332,7 +332,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>errorTables</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4214">errorTables</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4208">errorTables</a></pre>
 </li>
 </ul>
 <a name="errorList">
@@ -341,7 +341,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockListLast">
 <li class="blockList">
 <h4>errorList</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4217">errorList</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4211">errorList</a></pre>
 </li>
 </ul>
 </li>
@@ -358,7 +358,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PrintingErrorReporter</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4208">PrintingErrorReporter</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4202">PrintingErrorReporter</a>()</pre>
 </li>
 </ul>
 </li>
@@ -375,7 +375,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>clear</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4220">clear</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4214">clear</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#clear--">clear</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -388,7 +388,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4227">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4221">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -402,7 +402,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4242">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4236">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 <dl>
@@ -417,7 +417,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4248">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4242">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info)</pre>
@@ -433,7 +433,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4256">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4250">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info1,
@@ -450,7 +450,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4265">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4259">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#reportError-java.lang.String-">reportError</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -463,7 +463,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>report</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4275">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4269">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <div class="block">Report error information, but do not increment the error count.  Intended for cases
  where the actual error would have been reported previously.</div>
 <dl>
@@ -480,7 +480,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>summarize</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4283">summarize</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4277">summarize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#summarize--">summarize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -493,7 +493,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>getErrorList</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4296">getErrorList</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4290">getErrorList</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#getErrorList--">getErrorList</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -506,7 +506,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>print</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4301">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4295">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#print-java.lang.String-">print</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -519,7 +519,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>tableHasErrors</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4308">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4302">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#tableHasErrors-org.apache.hadoop.hbase.util.HBaseFsck.TableInfo-">tableHasErrors</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -532,7 +532,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>resetErrors</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4313">resetErrors</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4307">resetErrors</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#resetErrors--">resetErrors</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -545,7 +545,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>detail</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4318">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4312">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#detail-java.lang.String-">detail</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -558,7 +558,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockListLast">
 <li class="blockList">
 <h4>progress</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4326">progress</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4320">progress</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#progress--">progress</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
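
The methods above make up the ErrorReporter contract as PrintingErrorReporter implements it: the reportError(...) overloads count an inconsistency and record its ERROR_CODE, optionally tied to a TableInfo and one or two HbckInfo instances; report(...) prints follow-up detail without incrementing the count; progress() emits a throttled heartbeat; and summarize() returns the final tally as an int. A minimal console implementation of the same accumulate-and-summarize idea might look like the sketch below; ConsoleErrorReporter, its ErrorCode values and the threshold of 100 are invented for illustration and are not the HBase types.

  import java.util.ArrayList;
  import java.util.List;

  public class ConsoleErrorReporter {
    enum ErrorCode { UNKNOWN, HOLE_IN_REGION_CHAIN, ORPHAN_HDFS_REGION } // illustrative codes only

    private final List<ErrorCode> errorList = new ArrayList<>();
    private int errorCount = 0;
    private int showProgress = 0;
    private static final int PROGRESS_THRESHOLD = 100; // one marker per N progress() calls

    void reportError(ErrorCode code, String message) {
      errorList.add(code);
      errorCount++;
      System.out.println("ERROR: " + message);
      showProgress = 0; // restart the progress throttle after real output
    }

    /** Print follow-up detail for an error that has already been counted. */
    void report(String message) {
      System.out.println("ERROR: " + message);
      showProgress = 0;
    }

    void progress() {
      if (++showProgress >= PROGRESS_THRESHOLD) {
        System.out.print(".");
        showProgress = 0;
      }
    }

    /** @return the number of inconsistencies seen, suitable for use as an exit code. */
    int summarize() {
      System.out.println();
      System.out.println("Summary: " + errorCount + " inconsistencies detected.");
      return errorCount;
    }

    List<ErrorCode> getErrorList() {
      return errorList;
    }
  }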

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
index 71e0d0d..afecb18 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.884">HBaseFsck.RegionBoundariesInformation</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.883">HBaseFsck.RegionBoundariesInformation</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -219,7 +219,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>regionName</h4>
-<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.885">regionName</a></pre>
+<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.884">regionName</a></pre>
 </li>
 </ul>
 <a name="metaFirstKey">
@@ -228,7 +228,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>metaFirstKey</h4>
-<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.886">metaFirstKey</a></pre>
+<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.885">metaFirstKey</a></pre>
 </li>
 </ul>
 <a name="metaLastKey">
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>metaLastKey</h4>
-<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.887">metaLastKey</a></pre>
+<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.886">metaLastKey</a></pre>
 </li>
 </ul>
 <a name="storesFirstKey">
@@ -246,7 +246,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>storesFirstKey</h4>
-<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.888">storesFirstKey</a></pre>
+<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.887">storesFirstKey</a></pre>
 </li>
 </ul>
 <a name="storesLastKey">
@@ -255,7 +255,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>storesLastKey</h4>
-<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.889">storesLastKey</a></pre>
+<pre>public&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.888">storesLastKey</a></pre>
 </li>
 </ul>
 </li>
@@ -272,7 +272,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RegionBoundariesInformation</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.884">RegionBoundariesInformation</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.883">RegionBoundariesInformation</a>()</pre>
 </li>
 </ul>
 </li>
@@ -289,7 +289,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.891">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html#line.890">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
index bdedcea..0fc9527 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
@@ -126,7 +126,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1326">HBaseFsck.RegionRepairException</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1325">HBaseFsck.RegionRepairException</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Exception thrown when a integrity repair operation fails in an
  unresolvable way.</div>
@@ -221,7 +221,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockList">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1327">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1326">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.RegionRepairException.serialVersionUID">Constant Field Values</a></dd>
@@ -234,7 +234,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ioe</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1328">ioe</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1327">ioe</a></pre>
 </li>
 </ul>
 </li>
@@ -251,7 +251,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RegionRepairException</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1329">RegionRepairException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;s,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1328">RegionRepairException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;s,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>&nbsp;ioe)</pre>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
index c949650..8361276 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3022">HBaseFsck.TableInfo.HDFSIntegrityFixer</a>
+<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3016">HBaseFsck.TableInfo.HDFSIntegrityFixer</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo.IntegrityFixSuggester</a></pre>
 <div class="block">This handler fixes integrity errors from hdfs information.  There are
  basically three classes of integrity problems 1) holes, 2) overlaps, and
@@ -295,7 +295,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3023">conf</a></pre>
+<pre>org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3017">conf</a></pre>
 </li>
 </ul>
 <a name="fixOverlaps">
@@ -304,7 +304,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fixOverlaps</h4>
-<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3025">fixOverlaps</a></pre>
+<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3019">fixOverlaps</a></pre>
 </li>
 </ul>
 </li>
@@ -321,7 +321,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HDFSIntegrityFixer</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3027">HDFSIntegrityFixer</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3021">HDFSIntegrityFixer</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
                    <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
                    org.apache.hadoop.conf.Configuration&nbsp;conf,
                    boolean&nbsp;fixHoles,
@@ -342,7 +342,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionStartKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3041">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;next)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3035">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;next)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This is a special case hole -- when the first region of a table is
  missing from META, HBase doesn't acknowledge the existence of the
@@ -367,7 +367,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionEndKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3061">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3055">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleRegionEndKeyNotEmpty-byte:A-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling case where a Table has a last region that does not
@@ -391,7 +391,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleHoleInRegionChain</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3083">handleHoleInRegionChain</a>(byte[]&nbsp;holeStartKey,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3077">handleHoleInRegionChain</a>(byte[]&nbsp;holeStartKey,
                                     byte[]&nbsp;holeStopKey)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">There is a hole in the hdfs regions that violates the table integrity
@@ -415,7 +415,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleOverlapGroup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3113">handleOverlapGroup</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3107">handleOverlapGroup</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This takes set of overlapping regions and merges them into a single
  region.  This covers cases like degenerate regions, shared start key,
@@ -444,7 +444,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>removeParentsAndFixSplits</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3138">removeParentsAndFixSplits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3132">removeParentsAndFixSplits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -458,7 +458,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>mergeOverlaps</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3250">mergeOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3244">mergeOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -472,7 +472,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>sidelineBigOverlaps</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3328">sidelineBigOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;bigOverlap)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3322">sidelineBigOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;bigOverlap)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Sideline some regions in a big overlap group so that it
  will have fewer regions, and it is easier to merge them later on.</div>
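
The HDFSIntegrityFixer callbacks above (handleOverlapGroup, removeParentsAndFixSplits, mergeOverlaps, sidelineBigOverlaps) all operate on a group of regions whose key ranges overlap. As a rough, self-contained sketch of the core merge idea only (simplified String keys instead of HBase's byte[] comparators, and a hypothetical Region record rather than HBaseFsck.HbckInfo), merging an overlap group amounts to replacing it with one region spanning the smallest start key to the largest end key:

    import java.util.Arrays;
    import java.util.List;

    public class OverlapMergeSketch {
      // Hypothetical, simplified stand-in for an hbck region entry; the real code
      // uses HbckInfo with byte[] keys and Bytes comparators.
      record Region(String startKey, String endKey) {}

      // Merge an overlap group into the single region that covers all of them:
      // the smallest start key and the largest end key ("" means unbounded end).
      static Region mergeOverlaps(List<Region> overlap) {
        String start = overlap.stream().map(Region::startKey).min(String::compareTo).orElseThrow();
        boolean unboundedEnd = overlap.stream().anyMatch(r -> r.endKey().isEmpty());
        String end = unboundedEnd
            ? ""
            : overlap.stream().map(Region::endKey).max(String::compareTo).orElseThrow();
        return new Region(start, end);
      }

      public static void main(String[] args) {
        // A degenerate region ("b","b"), a shared start key, and a plain overlap.
        List<Region> group = Arrays.asList(
            new Region("a", "c"), new Region("b", "b"), new Region("b", "d"));
        System.out.println(mergeOverlaps(group)); // Region[startKey=a, endKey=d]
      }
    }

The real fixer additionally has to close the original regions and sideline or merge their HDFS data before re-inserting the merged region, which this sketch does not attempt.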

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
index f4a6210..2842fe9 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2938">HBaseFsck.TableInfo.IntegrityFixSuggester</a>
+<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2932">HBaseFsck.TableInfo.IntegrityFixSuggester</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html" title="class in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandlerImpl</a></pre>
 </li>
 </ul>
@@ -267,7 +267,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockListLast">
 <li class="blockList">
 <h4>errors</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2939">errors</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2933">errors</a></pre>
 </li>
 </ul>
 </li>
@@ -284,7 +284,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockListLast">
 <li class="blockList">
 <h4>IntegrityFixSuggester</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2941">IntegrityFixSuggester</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2935">IntegrityFixSuggester</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
                       <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors)</pre>
 </li>
 </ul>
@@ -302,7 +302,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionStartKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2947">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2941">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleRegionStartKeyNotEmpty-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling case where a Table has a first region that does not
@@ -327,7 +327,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionEndKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2955">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2949">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleRegionEndKeyNotEmpty-byte:A-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling case where a Table has a last region that does not
@@ -351,7 +351,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleDegenerateRegion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2962">handleDegenerateRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2956">handleDegenerateRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleDegenerateRegion-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling a region that has the same start and end key.</div>
@@ -373,7 +373,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleDuplicateStartKeys</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2968">handleDuplicateStartKeys</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2962">handleDuplicateStartKeys</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
                                      <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r2)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleDuplicateStartKeys-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
@@ -398,7 +398,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleSplit</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2980">handleSplit</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2974">handleSplit</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r2)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html#handleSplit-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandler</a></code></span></div>
@@ -419,7 +419,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleOverlapInRegionChain</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2992">handleOverlapInRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi1,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2986">handleOverlapInRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi1,
                                        <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi2)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleOverlapInRegionChain-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
@@ -446,7 +446,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockListLast">
 <li class="blockList">
 <h4>handleHoleInRegionChain</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2999">handleHoleInRegionChain</a>(byte[]&nbsp;holeStart,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2993">handleHoleInRegionChain</a>(byte[]&nbsp;holeStart,
                                     byte[]&nbsp;holeStop)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleHoleInRegionChain-byte:A-byte:A-">TableIntegrityErrorHandlerImpl</a></code></span></div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index e71f31c..b0efaac 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2843">HBaseFsck.TableInfo</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2837">HBaseFsck.TableInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Maintain information about a particular table.</div>
 </li>
@@ -293,7 +293,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tableName</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2844">tableName</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2838">tableName</a></pre>
 </li>
 </ul>
 <a name="deployedOn">
@@ -302,7 +302,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedOn</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeSet.html?is-external=true" title="class or interface in java.util">TreeSet</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2845">deployedOn</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeSet.html?is-external=true" title="class or interface in java.util">TreeSet</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2839">deployedOn</a></pre>
 </li>
 </ul>
 <a name="backwards">
@@ -311,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>backwards</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2848">backwards</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2842">backwards</a></pre>
 </li>
 </ul>
 <a name="sidelinedRegions">
@@ -320,7 +320,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelinedRegions</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.Path,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2851">sidelinedRegions</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.Path,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2845">sidelinedRegions</a></pre>
 </li>
 </ul>
 <a name="sc">
@@ -329,7 +329,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>sc</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/RegionSplitCalculator.html" title="class in org.apache.hadoop.hbase.util">RegionSplitCalculator</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2854">sc</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/RegionSplitCalculator.html" title="class in org.apache.hadoop.hbase.util">RegionSplitCalculator</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2848">sc</a></pre>
 </li>
 </ul>
 <a name="htds">
@@ -338,7 +338,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>htds</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2857">htds</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2851">htds</a></pre>
 </li>
 </ul>
 <a name="overlapGroups">
@@ -347,7 +347,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>overlapGroups</h4>
-<pre>final&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2860">overlapGroups</a></pre>
+<pre>final&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2854">overlapGroups</a></pre>
 </li>
 </ul>
 <a name="regionsFromMeta">
@@ -356,7 +356,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>regionsFromMeta</h4>
-<pre>private&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2864">regionsFromMeta</a></pre>
+<pre>private&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2858">regionsFromMeta</a></pre>
 </li>
 </ul>
 </li>
@@ -373,7 +373,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TableInfo</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2866">TableInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2860">TableInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)</pre>
 </li>
 </ul>
 </li>
@@ -390,7 +390,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getHTD</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2874">getHTD</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2868">getHTD</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>descriptor common to all regions.  null if are none or multiple!</dd>
@@ -403,7 +403,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addRegionInfo</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2884">addRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hir)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2878">addRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hir)</pre>
 </li>
 </ul>
 <a name="addServer-org.apache.hadoop.hbase.ServerName-">
@@ -412,7 +412,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2909">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2903">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server)</pre>
 </li>
 </ul>
 <a name="getName--">
@@ -421,7 +421,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2913">getName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2907">getName</a>()</pre>
 </li>
 </ul>
 <a name="getNumRegions--">
@@ -430,7 +430,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumRegions</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2917">getNumRegions</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2911">getNumRegions</a>()</pre>
 </li>
 </ul>
 <a name="getRegionsFromMeta--">
@@ -439,7 +439,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionsFromMeta</h4>
-<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2921">getRegionsFromMeta</a>()</pre>
+<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2915">getRegionsFromMeta</a>()</pre>
 </li>
 </ul>
 <a name="checkRegionChain-org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler-">
@@ -448,7 +448,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRegionChain</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3376">checkRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3370">checkRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check the region chain (from META) of this table.  We are looking for
  holes, overlaps, and cycles.</div>
@@ -466,7 +466,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>handleOverlapsParallel</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3506">handleOverlapsParallel</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3500">handleOverlapsParallel</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler,
                                        byte[]&nbsp;prevKey)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -481,7 +481,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>dump</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3543">dump</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true" title="class or interface in java.util">SortedSet</a>&lt;byte[]&gt;&nbsp;splits,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3537">dump</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true" title="class or interface in java.util">SortedSet</a>&lt;byte[]&gt;&nbsp;splits,
           org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;regions)</pre>
 <div class="block">This dumps data in a visually reasonable way for visual debugging</div>
 <dl>
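
checkRegionChain above walks a table's regions from META in key order, looking for holes, overlaps, and cycles. A condensed sketch of the hole and overlap detection only, using simplified String keys and a hypothetical Region record in place of HBaseFsck's RegionSplitCalculator and HbckInfo:

    import java.util.Comparator;
    import java.util.List;

    public class RegionChainCheckSketch {
      record Region(String startKey, String endKey) {}   // "" end key = unbounded

      // Returns true if the chain is complete: the first region starts at "", each
      // region starts exactly where the previous one ends, and the last ends at "".
      static boolean checkRegionChain(List<Region> regions) {
        boolean ok = true;
        List<Region> sorted = regions.stream()
            .sorted(Comparator.comparing(Region::startKey)).toList();
        String prevEnd = "";                              // table key space starts at ""
        for (Region r : sorted) {
          int cmp = r.startKey().compareTo(prevEnd);
          if (cmp > 0) { System.out.println("hole: [" + prevEnd + ", " + r.startKey() + ")"); ok = false; }
          if (cmp < 0) { System.out.println("overlap at " + r.startKey()); ok = false; }
          prevEnd = r.endKey();
        }
        if (!prevEnd.isEmpty()) { System.out.println("hole at end: [" + prevEnd + ", )"); ok = false; }
        return ok;
      }

      public static void main(String[] args) {
        // Missing the ["c","e") region, and ["e","g") overlaps ["f","").
        System.out.println(checkRegionChain(List.of(
            new Region("", "c"), new Region("e", "g"), new Region("f", ""))));
      }
    }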

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
index b78cf5f..dd4d462 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4404">HBaseFsck.WorkItemHdfsDir</a>
+<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4398">HBaseFsck.WorkItemHdfsDir</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 <div class="block">Contact hdfs and get all information about specified table directory into
@@ -218,7 +218,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>tableDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileStatus <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4405">tableDir</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileStatus <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4399">tableDir</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -227,7 +227,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4406">errors</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4400">errors</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -236,7 +236,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4407">fs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4401">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemHdfsDir</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4409">WorkItemHdfsDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4403">WorkItemHdfsDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                 <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
                 org.apache.hadoop.fs.FileStatus&nbsp;status)</pre>
 </li>
@@ -272,7 +272,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4417">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4411">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true" title="class or interface in java.util.concurrent">ExecutionException</a></pre>
 <dl>
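
WorkItemHdfsDir, like WorkItemHdfsRegionInfo on the next page, is a Callable<Void> work item that hbck submits to a thread pool so that many table directories can be scanned in parallel, with failures surfaced when the futures are collected. The fan-out/fan-in pattern, sketched with a hypothetical ScanTableDir work item and illustrative paths instead of real HDFS calls:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public class WorkItemSketch {
      // Hypothetical stand-in for the real work item, which lists a table dir on HDFS.
      record ScanTableDir(String tableDir) implements Callable<Void> {
        @Override public Void call() {
          System.out.println("scanning " + tableDir + " on " + Thread.currentThread().getName());
          return null;   // the real code reports its findings through shared state
        }
      }

      public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        try {
          List<Future<Void>> futures = new ArrayList<>();
          for (String dir : List.of("/hbase/data/default/t1", "/hbase/data/default/t2")) {
            futures.add(pool.submit(new ScanTableDir(dir)));   // fan out one item per dir
          }
          for (Future<Void> f : futures) {
            f.get();   // fan in: propagate any exception thrown by a work item
          }
        } finally {
          pool.shutdown();
        }
      }
    }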

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
index 1a11c40..a831646 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4524">HBaseFsck.WorkItemHdfsRegionInfo</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4518">HBaseFsck.WorkItemHdfsRegionInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 <div class="block">Contact hdfs and get all information about specified table directory into
@@ -218,7 +218,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>hbi</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4525">hbi</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4519">hbi</a></pre>
 </li>
 </ul>
 <a name="hbck">
@@ -227,7 +227,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>hbck</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4526">hbck</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4520">hbck</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -236,7 +236,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4527">errors</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4521">errors</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemHdfsRegionInfo</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4529">WorkItemHdfsRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4523">WorkItemHdfsRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
                        <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
                        <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors)</pre>
 </li>
@@ -272,7 +272,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4536">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4530">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
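
The WorkItemHdfsRegionInfo covered above is one of hbck's per-region work items: a Callable<Void> carrying the HbckInfo to inspect, the owning HBaseFsck and an ErrorReporter, which lets hbck read HDFS region directories in parallel rather than one at a time. A minimal, self-contained sketch of that fan-out pattern follows; the class, field and method names are illustrative stand-ins, not HBaseFsck's actual members.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class WorkItemFanOutSketch {

  /** Hypothetical stand-in for one unit of per-region work; not HBaseFsck's class. */
  static class RegionCheckItem implements Callable<Void> {
    private final String regionDir; // plays the role of the HbckInfo handed to the real item

    RegionCheckItem(String regionDir) {
      this.regionDir = regionDir;
    }

    @Override
    public Void call() throws Exception {
      // The real work item reads the region's metadata from HDFS here and reports
      // problems through its ErrorReporter; this sketch only simulates the step.
      System.out.println("checking " + regionDir);
      return null;
    }
  }

  public static void main(String[] args) throws InterruptedException, ExecutionException {
    ExecutorService executor = Executors.newFixedThreadPool(4);
    List<Future<Void>> futures = new ArrayList<>();
    for (String dir : new String[] { "region-a", "region-b", "region-c" }) {
      futures.add(executor.submit(new RegionCheckItem(dir)));
    }
    // Wait for every item to finish; get() rethrows the first failure, if any.
    for (Future<Void> f : futures) {
      f.get();
    }
    executor.shutdown();
  }
}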


[21/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
index 809f66f..9b60dd0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
@@ -765,146 +765,145 @@
 <span class="sourceLineNo">757</span>        found.set(true);<a name="line.757"></a>
 <span class="sourceLineNo">758</span>        try {<a name="line.758"></a>
 <span class="sourceLineNo">759</span>          boolean rootMetaFound =<a name="line.759"></a>
-<span class="sourceLineNo">760</span>              masterServices.getMetaTableLocator().verifyMetaRegionLocation(<a name="line.760"></a>
-<span class="sourceLineNo">761</span>                  conn, masterServices.getZooKeeper(), 1);<a name="line.761"></a>
-<span class="sourceLineNo">762</span>          if (rootMetaFound) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>              @Override<a name="line.764"></a>
-<span class="sourceLineNo">765</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.766"></a>
-<span class="sourceLineNo">767</span>                if (info != null) {<a name="line.767"></a>
-<span class="sourceLineNo">768</span>                  Cell serverCell =<a name="line.768"></a>
-<span class="sourceLineNo">769</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.769"></a>
-<span class="sourceLineNo">770</span>                          HConstants.SERVER_QUALIFIER);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.771"></a>
-<span class="sourceLineNo">772</span>                    ServerName sn =<a name="line.772"></a>
-<span class="sourceLineNo">773</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>                    if (sn == null) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>                      found.set(false);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.776"></a>
-<span class="sourceLineNo">777</span>                      try {<a name="line.777"></a>
-<span class="sourceLineNo">778</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>                        ClientProtos.GetRequest request =<a name="line.779"></a>
-<span class="sourceLineNo">780</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.780"></a>
-<span class="sourceLineNo">781</span>                                new Get(ROW_KEY));<a name="line.781"></a>
-<span class="sourceLineNo">782</span>                        rs.get(null, request);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>                        assignedRegions.add(info);<a name="line.783"></a>
-<span class="sourceLineNo">784</span>                      } catch(Exception ex) {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>                      }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>                    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>                    foundRegions.add(info);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>                  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span>                }<a name="line.790"></a>
-<span class="sourceLineNo">791</span>                return true;<a name="line.791"></a>
-<span class="sourceLineNo">792</span>              }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>            };<a name="line.793"></a>
-<span class="sourceLineNo">794</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.794"></a>
-<span class="sourceLineNo">795</span>            // if no regions in meta then we have to create the table<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>              createRSGroupTable();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>              createSent = true;<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.800"></a>
-<span class="sourceLineNo">801</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.801"></a>
-<span class="sourceLineNo">802</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.803"></a>
-<span class="sourceLineNo">804</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>          } else {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.806"></a>
-<span class="sourceLineNo">807</span>            found.set(false);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>          }<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          if (found.get()) {<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            online = true;<a name="line.812"></a>
-<span class="sourceLineNo">813</span>            //flush any inconsistencies between ZK and HTable<a name="line.813"></a>
-<span class="sourceLineNo">814</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>          }<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        } catch (RuntimeException e) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          throw e;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        } catch(Exception e) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          found.set(false);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>          LOG.warn("Failed to perform check", e);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        try {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>          Thread.sleep(100);<a name="line.823"></a>
-<span class="sourceLineNo">824</span>        } catch (InterruptedException e) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          LOG.info("Sleep interrupted", e);<a name="line.825"></a>
-<span class="sourceLineNo">826</span>        }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      }<a name="line.827"></a>
-<span class="sourceLineNo">828</span>      return found.get();<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    }<a name="line.829"></a>
-<span class="sourceLineNo">830</span><a name="line.830"></a>
-<span class="sourceLineNo">831</span>    private void createRSGroupTable() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      // wait for region to be online<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int tries = 600;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.835"></a>
-<span class="sourceLineNo">836</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          &amp;&amp; tries &gt; 0) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>        try {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>          Thread.sleep(100);<a name="line.839"></a>
-<span class="sourceLineNo">840</span>        } catch (InterruptedException e) {<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          throw new IOException("Wait interrupted ", e);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        }<a name="line.842"></a>
-<span class="sourceLineNo">843</span>        tries--;<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      }<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      if(tries &lt;= 0) {<a name="line.845"></a>
-<span class="sourceLineNo">846</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      } else {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.849"></a>
-<span class="sourceLineNo">850</span>          throw new IOException("Failed to create group table. " +<a name="line.850"></a>
-<span class="sourceLineNo">851</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.851"></a>
-<span class="sourceLineNo">852</span>        }<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      }<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    }<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    public boolean isOnline() {<a name="line.856"></a>
-<span class="sourceLineNo">857</span>      return online;<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    }<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  }<a name="line.863"></a>
-<span class="sourceLineNo">864</span><a name="line.864"></a>
-<span class="sourceLineNo">865</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>    for (Mutation mutation : mutations) {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      if (mutation instanceof Put) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            mutation));<a name="line.873"></a>
-<span class="sourceLineNo">874</span>      } else if (mutation instanceof Delete) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        mmrBuilder.addMutationRequest(<a name="line.875"></a>
-<span class="sourceLineNo">876</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.876"></a>
-<span class="sourceLineNo">877</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.877"></a>
-<span class="sourceLineNo">878</span>                  MutationType.DELETE, mutation));<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      } else {<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.880"></a>
-<span class="sourceLineNo">881</span>          + mutation.getClass().getName());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span><a name="line.884"></a>
-<span class="sourceLineNo">885</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>    try {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.888"></a>
-<span class="sourceLineNo">889</span>    } catch (ServiceException ex) {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      ProtobufUtil.toIOException(ex);<a name="line.890"></a>
-<span class="sourceLineNo">891</span>    }<a name="line.891"></a>
-<span class="sourceLineNo">892</span>  }<a name="line.892"></a>
-<span class="sourceLineNo">893</span><a name="line.893"></a>
-<span class="sourceLineNo">894</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.896"></a>
-<span class="sourceLineNo">897</span>    }<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  }<a name="line.898"></a>
-<span class="sourceLineNo">899</span>}<a name="line.899"></a>
+<span class="sourceLineNo">760</span>            Utility.verifyMetaRegionLocation(conn, masterServices.getZooKeeper(), 1);<a name="line.760"></a>
+<span class="sourceLineNo">761</span>          if (rootMetaFound) {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.762"></a>
+<span class="sourceLineNo">763</span>              @Override<a name="line.763"></a>
+<span class="sourceLineNo">764</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.765"></a>
+<span class="sourceLineNo">766</span>                if (info != null) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>                  Cell serverCell =<a name="line.767"></a>
+<span class="sourceLineNo">768</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.768"></a>
+<span class="sourceLineNo">769</span>                          HConstants.SERVER_QUALIFIER);<a name="line.769"></a>
+<span class="sourceLineNo">770</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.770"></a>
+<span class="sourceLineNo">771</span>                    ServerName sn =<a name="line.771"></a>
+<span class="sourceLineNo">772</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.772"></a>
+<span class="sourceLineNo">773</span>                    if (sn == null) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>                      found.set(false);<a name="line.774"></a>
+<span class="sourceLineNo">775</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.775"></a>
+<span class="sourceLineNo">776</span>                      try {<a name="line.776"></a>
+<span class="sourceLineNo">777</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.777"></a>
+<span class="sourceLineNo">778</span>                        ClientProtos.GetRequest request =<a name="line.778"></a>
+<span class="sourceLineNo">779</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.779"></a>
+<span class="sourceLineNo">780</span>                                new Get(ROW_KEY));<a name="line.780"></a>
+<span class="sourceLineNo">781</span>                        rs.get(null, request);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>                        assignedRegions.add(info);<a name="line.782"></a>
+<span class="sourceLineNo">783</span>                      } catch(Exception ex) {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.784"></a>
+<span class="sourceLineNo">785</span>                      }<a name="line.785"></a>
+<span class="sourceLineNo">786</span>                    }<a name="line.786"></a>
+<span class="sourceLineNo">787</span>                    foundRegions.add(info);<a name="line.787"></a>
+<span class="sourceLineNo">788</span>                  }<a name="line.788"></a>
+<span class="sourceLineNo">789</span>                }<a name="line.789"></a>
+<span class="sourceLineNo">790</span>                return true;<a name="line.790"></a>
+<span class="sourceLineNo">791</span>              }<a name="line.791"></a>
+<span class="sourceLineNo">792</span>            };<a name="line.792"></a>
+<span class="sourceLineNo">793</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.793"></a>
+<span class="sourceLineNo">794</span>            // if no regions in meta then we have to create the table<a name="line.794"></a>
+<span class="sourceLineNo">795</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>              createRSGroupTable();<a name="line.796"></a>
+<span class="sourceLineNo">797</span>              createSent = true;<a name="line.797"></a>
+<span class="sourceLineNo">798</span>            }<a name="line.798"></a>
+<span class="sourceLineNo">799</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.799"></a>
+<span class="sourceLineNo">800</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.800"></a>
+<span class="sourceLineNo">801</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.801"></a>
+<span class="sourceLineNo">802</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.802"></a>
+<span class="sourceLineNo">803</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.803"></a>
+<span class="sourceLineNo">804</span>          } else {<a name="line.804"></a>
+<span class="sourceLineNo">805</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.805"></a>
+<span class="sourceLineNo">806</span>            found.set(false);<a name="line.806"></a>
+<span class="sourceLineNo">807</span>          }<a name="line.807"></a>
+<span class="sourceLineNo">808</span>          if (found.get()) {<a name="line.808"></a>
+<span class="sourceLineNo">809</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.809"></a>
+<span class="sourceLineNo">810</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.810"></a>
+<span class="sourceLineNo">811</span>            online = true;<a name="line.811"></a>
+<span class="sourceLineNo">812</span>            //flush any inconsistencies between ZK and HTable<a name="line.812"></a>
+<span class="sourceLineNo">813</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.813"></a>
+<span class="sourceLineNo">814</span>          }<a name="line.814"></a>
+<span class="sourceLineNo">815</span>        } catch (RuntimeException e) {<a name="line.815"></a>
+<span class="sourceLineNo">816</span>          throw e;<a name="line.816"></a>
+<span class="sourceLineNo">817</span>        } catch(Exception e) {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>          found.set(false);<a name="line.818"></a>
+<span class="sourceLineNo">819</span>          LOG.warn("Failed to perform check", e);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>        }<a name="line.820"></a>
+<span class="sourceLineNo">821</span>        try {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>          Thread.sleep(100);<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        } catch (InterruptedException e) {<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          LOG.info("Sleep interrupted", e);<a name="line.824"></a>
+<span class="sourceLineNo">825</span>        }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>      return found.get();<a name="line.827"></a>
+<span class="sourceLineNo">828</span>    }<a name="line.828"></a>
+<span class="sourceLineNo">829</span><a name="line.829"></a>
+<span class="sourceLineNo">830</span>    private void createRSGroupTable() throws IOException {<a name="line.830"></a>
+<span class="sourceLineNo">831</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.831"></a>
+<span class="sourceLineNo">832</span>      // wait for region to be online<a name="line.832"></a>
+<span class="sourceLineNo">833</span>      int tries = 600;<a name="line.833"></a>
+<span class="sourceLineNo">834</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.834"></a>
+<span class="sourceLineNo">835</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.835"></a>
+<span class="sourceLineNo">836</span>          &amp;&amp; tries &gt; 0) {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>        try {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>          Thread.sleep(100);<a name="line.838"></a>
+<span class="sourceLineNo">839</span>        } catch (InterruptedException e) {<a name="line.839"></a>
+<span class="sourceLineNo">840</span>          throw new IOException("Wait interrupted ", e);<a name="line.840"></a>
+<span class="sourceLineNo">841</span>        }<a name="line.841"></a>
+<span class="sourceLineNo">842</span>        tries--;<a name="line.842"></a>
+<span class="sourceLineNo">843</span>      }<a name="line.843"></a>
+<span class="sourceLineNo">844</span>      if(tries &lt;= 0) {<a name="line.844"></a>
+<span class="sourceLineNo">845</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.845"></a>
+<span class="sourceLineNo">846</span>      } else {<a name="line.846"></a>
+<span class="sourceLineNo">847</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>          throw new IOException("Failed to create group table. " +<a name="line.849"></a>
+<span class="sourceLineNo">850</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.850"></a>
+<span class="sourceLineNo">851</span>        }<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      }<a name="line.852"></a>
+<span class="sourceLineNo">853</span>    }<a name="line.853"></a>
+<span class="sourceLineNo">854</span><a name="line.854"></a>
+<span class="sourceLineNo">855</span>    public boolean isOnline() {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      return online;<a name="line.856"></a>
+<span class="sourceLineNo">857</span>    }<a name="line.857"></a>
+<span class="sourceLineNo">858</span>  }<a name="line.858"></a>
+<span class="sourceLineNo">859</span><a name="line.859"></a>
+<span class="sourceLineNo">860</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.860"></a>
+<span class="sourceLineNo">861</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.861"></a>
+<span class="sourceLineNo">862</span>  }<a name="line.862"></a>
+<span class="sourceLineNo">863</span><a name="line.863"></a>
+<span class="sourceLineNo">864</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.864"></a>
+<span class="sourceLineNo">865</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.865"></a>
+<span class="sourceLineNo">866</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.866"></a>
+<span class="sourceLineNo">867</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.867"></a>
+<span class="sourceLineNo">868</span>    for (Mutation mutation : mutations) {<a name="line.868"></a>
+<span class="sourceLineNo">869</span>      if (mutation instanceof Put) {<a name="line.869"></a>
+<span class="sourceLineNo">870</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.870"></a>
+<span class="sourceLineNo">871</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.871"></a>
+<span class="sourceLineNo">872</span>            mutation));<a name="line.872"></a>
+<span class="sourceLineNo">873</span>      } else if (mutation instanceof Delete) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>        mmrBuilder.addMutationRequest(<a name="line.874"></a>
+<span class="sourceLineNo">875</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.875"></a>
+<span class="sourceLineNo">876</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.876"></a>
+<span class="sourceLineNo">877</span>                  MutationType.DELETE, mutation));<a name="line.877"></a>
+<span class="sourceLineNo">878</span>      } else {<a name="line.878"></a>
+<span class="sourceLineNo">879</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.879"></a>
+<span class="sourceLineNo">880</span>          + mutation.getClass().getName());<a name="line.880"></a>
+<span class="sourceLineNo">881</span>      }<a name="line.881"></a>
+<span class="sourceLineNo">882</span>    }<a name="line.882"></a>
+<span class="sourceLineNo">883</span><a name="line.883"></a>
+<span class="sourceLineNo">884</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.884"></a>
+<span class="sourceLineNo">885</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.885"></a>
+<span class="sourceLineNo">886</span>    try {<a name="line.886"></a>
+<span class="sourceLineNo">887</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.887"></a>
+<span class="sourceLineNo">888</span>    } catch (ServiceException ex) {<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      ProtobufUtil.toIOException(ex);<a name="line.889"></a>
+<span class="sourceLineNo">890</span>    }<a name="line.890"></a>
+<span class="sourceLineNo">891</span>  }<a name="line.891"></a>
+<span class="sourceLineNo">892</span><a name="line.892"></a>
+<span class="sourceLineNo">893</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.894"></a>
+<span class="sourceLineNo">895</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.895"></a>
+<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
+<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
+<span class="sourceLineNo">898</span>}<a name="line.898"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
index 809f66f..9b60dd0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.ServerEventsListenerThread.html
@@ -765,146 +765,145 @@
 <span class="sourceLineNo">757</span>        found.set(true);<a name="line.757"></a>
 <span class="sourceLineNo">758</span>        try {<a name="line.758"></a>
 <span class="sourceLineNo">759</span>          boolean rootMetaFound =<a name="line.759"></a>
-<span class="sourceLineNo">760</span>              masterServices.getMetaTableLocator().verifyMetaRegionLocation(<a name="line.760"></a>
-<span class="sourceLineNo">761</span>                  conn, masterServices.getZooKeeper(), 1);<a name="line.761"></a>
-<span class="sourceLineNo">762</span>          if (rootMetaFound) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>              @Override<a name="line.764"></a>
-<span class="sourceLineNo">765</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.766"></a>
-<span class="sourceLineNo">767</span>                if (info != null) {<a name="line.767"></a>
-<span class="sourceLineNo">768</span>                  Cell serverCell =<a name="line.768"></a>
-<span class="sourceLineNo">769</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.769"></a>
-<span class="sourceLineNo">770</span>                          HConstants.SERVER_QUALIFIER);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.771"></a>
-<span class="sourceLineNo">772</span>                    ServerName sn =<a name="line.772"></a>
-<span class="sourceLineNo">773</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>                    if (sn == null) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>                      found.set(false);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.776"></a>
-<span class="sourceLineNo">777</span>                      try {<a name="line.777"></a>
-<span class="sourceLineNo">778</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>                        ClientProtos.GetRequest request =<a name="line.779"></a>
-<span class="sourceLineNo">780</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.780"></a>
-<span class="sourceLineNo">781</span>                                new Get(ROW_KEY));<a name="line.781"></a>
-<span class="sourceLineNo">782</span>                        rs.get(null, request);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>                        assignedRegions.add(info);<a name="line.783"></a>
-<span class="sourceLineNo">784</span>                      } catch(Exception ex) {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>                      }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>                    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>                    foundRegions.add(info);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>                  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span>                }<a name="line.790"></a>
-<span class="sourceLineNo">791</span>                return true;<a name="line.791"></a>
-<span class="sourceLineNo">792</span>              }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>            };<a name="line.793"></a>
-<span class="sourceLineNo">794</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.794"></a>
-<span class="sourceLineNo">795</span>            // if no regions in meta then we have to create the table<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>              createRSGroupTable();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>              createSent = true;<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.800"></a>
-<span class="sourceLineNo">801</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.801"></a>
-<span class="sourceLineNo">802</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.803"></a>
-<span class="sourceLineNo">804</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>          } else {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.806"></a>
-<span class="sourceLineNo">807</span>            found.set(false);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>          }<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          if (found.get()) {<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            online = true;<a name="line.812"></a>
-<span class="sourceLineNo">813</span>            //flush any inconsistencies between ZK and HTable<a name="line.813"></a>
-<span class="sourceLineNo">814</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>          }<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        } catch (RuntimeException e) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          throw e;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        } catch(Exception e) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          found.set(false);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>          LOG.warn("Failed to perform check", e);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        try {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>          Thread.sleep(100);<a name="line.823"></a>
-<span class="sourceLineNo">824</span>        } catch (InterruptedException e) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          LOG.info("Sleep interrupted", e);<a name="line.825"></a>
-<span class="sourceLineNo">826</span>        }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      }<a name="line.827"></a>
-<span class="sourceLineNo">828</span>      return found.get();<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    }<a name="line.829"></a>
-<span class="sourceLineNo">830</span><a name="line.830"></a>
-<span class="sourceLineNo">831</span>    private void createRSGroupTable() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      // wait for region to be online<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int tries = 600;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.835"></a>
-<span class="sourceLineNo">836</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          &amp;&amp; tries &gt; 0) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>        try {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>          Thread.sleep(100);<a name="line.839"></a>
-<span class="sourceLineNo">840</span>        } catch (InterruptedException e) {<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          throw new IOException("Wait interrupted ", e);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        }<a name="line.842"></a>
-<span class="sourceLineNo">843</span>        tries--;<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      }<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      if(tries &lt;= 0) {<a name="line.845"></a>
-<span class="sourceLineNo">846</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      } else {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.849"></a>
-<span class="sourceLineNo">850</span>          throw new IOException("Failed to create group table. " +<a name="line.850"></a>
-<span class="sourceLineNo">851</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.851"></a>
-<span class="sourceLineNo">852</span>        }<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      }<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    }<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    public boolean isOnline() {<a name="line.856"></a>
-<span class="sourceLineNo">857</span>      return online;<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    }<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  }<a name="line.863"></a>
-<span class="sourceLineNo">864</span><a name="line.864"></a>
-<span class="sourceLineNo">865</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>    for (Mutation mutation : mutations) {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      if (mutation instanceof Put) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            mutation));<a name="line.873"></a>
-<span class="sourceLineNo">874</span>      } else if (mutation instanceof Delete) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        mmrBuilder.addMutationRequest(<a name="line.875"></a>
-<span class="sourceLineNo">876</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.876"></a>
-<span class="sourceLineNo">877</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.877"></a>
-<span class="sourceLineNo">878</span>                  MutationType.DELETE, mutation));<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      } else {<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.880"></a>
-<span class="sourceLineNo">881</span>          + mutation.getClass().getName());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span><a name="line.884"></a>
-<span class="sourceLineNo">885</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>    try {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.888"></a>
-<span class="sourceLineNo">889</span>    } catch (ServiceException ex) {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      ProtobufUtil.toIOException(ex);<a name="line.890"></a>
-<span class="sourceLineNo">891</span>    }<a name="line.891"></a>
-<span class="sourceLineNo">892</span>  }<a name="line.892"></a>
-<span class="sourceLineNo">893</span><a name="line.893"></a>
-<span class="sourceLineNo">894</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.896"></a>
-<span class="sourceLineNo">897</span>    }<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  }<a name="line.898"></a>
-<span class="sourceLineNo">899</span>}<a name="line.899"></a>
+<span class="sourceLineNo">760</span>            Utility.verifyMetaRegionLocation(conn, masterServices.getZooKeeper(), 1);<a name="line.760"></a>
+<span class="sourceLineNo">761</span>          if (rootMetaFound) {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.762"></a>
+<span class="sourceLineNo">763</span>              @Override<a name="line.763"></a>
+<span class="sourceLineNo">764</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.765"></a>
+<span class="sourceLineNo">766</span>                if (info != null) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>                  Cell serverCell =<a name="line.767"></a>
+<span class="sourceLineNo">768</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.768"></a>
+<span class="sourceLineNo">769</span>                          HConstants.SERVER_QUALIFIER);<a name="line.769"></a>
+<span class="sourceLineNo">770</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.770"></a>
+<span class="sourceLineNo">771</span>                    ServerName sn =<a name="line.771"></a>
+<span class="sourceLineNo">772</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.772"></a>
+<span class="sourceLineNo">773</span>                    if (sn == null) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>                      found.set(false);<a name="line.774"></a>
+<span class="sourceLineNo">775</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.775"></a>
+<span class="sourceLineNo">776</span>                      try {<a name="line.776"></a>
+<span class="sourceLineNo">777</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.777"></a>
+<span class="sourceLineNo">778</span>                        ClientProtos.GetRequest request =<a name="line.778"></a>
+<span class="sourceLineNo">779</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.779"></a>
+<span class="sourceLineNo">780</span>                                new Get(ROW_KEY));<a name="line.780"></a>
+<span class="sourceLineNo">781</span>                        rs.get(null, request);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>                        assignedRegions.add(info);<a name="line.782"></a>
+<span class="sourceLineNo">783</span>                      } catch(Exception ex) {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.784"></a>
+<span class="sourceLineNo">785</span>                      }<a name="line.785"></a>
+<span class="sourceLineNo">786</span>                    }<a name="line.786"></a>
+<span class="sourceLineNo">787</span>                    foundRegions.add(info);<a name="line.787"></a>
+<span class="sourceLineNo">788</span>                  }<a name="line.788"></a>
+<span class="sourceLineNo">789</span>                }<a name="line.789"></a>
+<span class="sourceLineNo">790</span>                return true;<a name="line.790"></a>
+<span class="sourceLineNo">791</span>              }<a name="line.791"></a>
+<span class="sourceLineNo">792</span>            };<a name="line.792"></a>
+<span class="sourceLineNo">793</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.793"></a>
+<span class="sourceLineNo">794</span>            // if no regions in meta then we have to create the table<a name="line.794"></a>
+<span class="sourceLineNo">795</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>              createRSGroupTable();<a name="line.796"></a>
+<span class="sourceLineNo">797</span>              createSent = true;<a name="line.797"></a>
+<span class="sourceLineNo">798</span>            }<a name="line.798"></a>
+<span class="sourceLineNo">799</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.799"></a>
+<span class="sourceLineNo">800</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.800"></a>
+<span class="sourceLineNo">801</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.801"></a>
+<span class="sourceLineNo">802</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.802"></a>
+<span class="sourceLineNo">803</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.803"></a>
+<span class="sourceLineNo">804</span>          } else {<a name="line.804"></a>
+<span class="sourceLineNo">805</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.805"></a>
+<span class="sourceLineNo">806</span>            found.set(false);<a name="line.806"></a>
+<span class="sourceLineNo">807</span>          }<a name="line.807"></a>
+<span class="sourceLineNo">808</span>          if (found.get()) {<a name="line.808"></a>
+<span class="sourceLineNo">809</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.809"></a>
+<span class="sourceLineNo">810</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.810"></a>
+<span class="sourceLineNo">811</span>            online = true;<a name="line.811"></a>
+<span class="sourceLineNo">812</span>            //flush any inconsistencies between ZK and HTable<a name="line.812"></a>
+<span class="sourceLineNo">813</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.813"></a>
+<span class="sourceLineNo">814</span>          }<a name="line.814"></a>
+<span class="sourceLineNo">815</span>        } catch (RuntimeException e) {<a name="line.815"></a>
+<span class="sourceLineNo">816</span>          throw e;<a name="line.816"></a>
+<span class="sourceLineNo">817</span>        } catch(Exception e) {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>          found.set(false);<a name="line.818"></a>
+<span class="sourceLineNo">819</span>          LOG.warn("Failed to perform check", e);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>        }<a name="line.820"></a>
+<span class="sourceLineNo">821</span>        try {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>          Thread.sleep(100);<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        } catch (InterruptedException e) {<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          LOG.info("Sleep interrupted", e);<a name="line.824"></a>
+<span class="sourceLineNo">825</span>        }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>      return found.get();<a name="line.827"></a>
+<span class="sourceLineNo">828</span>    }<a name="line.828"></a>
+<span class="sourceLineNo">829</span><a name="line.829"></a>
+<span class="sourceLineNo">830</span>    private void createRSGroupTable() throws IOException {<a name="line.830"></a>
+<span class="sourceLineNo">831</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.831"></a>
+<span class="sourceLineNo">832</span>      // wait for region to be online<a name="line.832"></a>
+<span class="sourceLineNo">833</span>      int tries = 600;<a name="line.833"></a>
+<span class="sourceLineNo">834</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.834"></a>
+<span class="sourceLineNo">835</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.835"></a>
+<span class="sourceLineNo">836</span>          &amp;&amp; tries &gt; 0) {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>        try {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>          Thread.sleep(100);<a name="line.838"></a>
+<span class="sourceLineNo">839</span>        } catch (InterruptedException e) {<a name="line.839"></a>
+<span class="sourceLineNo">840</span>          throw new IOException("Wait interrupted ", e);<a name="line.840"></a>
+<span class="sourceLineNo">841</span>        }<a name="line.841"></a>
+<span class="sourceLineNo">842</span>        tries--;<a name="line.842"></a>
+<span class="sourceLineNo">843</span>      }<a name="line.843"></a>
+<span class="sourceLineNo">844</span>      if(tries &lt;= 0) {<a name="line.844"></a>
+<span class="sourceLineNo">845</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.845"></a>
+<span class="sourceLineNo">846</span>      } else {<a name="line.846"></a>
+<span class="sourceLineNo">847</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>          throw new IOException("Failed to create group table. " +<a name="line.849"></a>
+<span class="sourceLineNo">850</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.850"></a>
+<span class="sourceLineNo">851</span>        }<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      }<a name="line.852"></a>
+<span class="sourceLineNo">853</span>    }<a name="line.853"></a>
+<span class="sourceLineNo">854</span><a name="line.854"></a>
+<span class="sourceLineNo">855</span>    public boolean isOnline() {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      return online;<a name="line.856"></a>
+<span class="sourceLineNo">857</span>    }<a name="line.857"></a>
+<span class="sourceLineNo">858</span>  }<a name="line.858"></a>
+<span class="sourceLineNo">859</span><a name="line.859"></a>
+<span class="sourceLineNo">860</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.860"></a>
+<span class="sourceLineNo">861</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.861"></a>
+<span class="sourceLineNo">862</span>  }<a name="line.862"></a>
+<span class="sourceLineNo">863</span><a name="line.863"></a>
+<span class="sourceLineNo">864</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.864"></a>
+<span class="sourceLineNo">865</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.865"></a>
+<span class="sourceLineNo">866</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.866"></a>
+<span class="sourceLineNo">867</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.867"></a>
+<span class="sourceLineNo">868</span>    for (Mutation mutation : mutations) {<a name="line.868"></a>
+<span class="sourceLineNo">869</span>      if (mutation instanceof Put) {<a name="line.869"></a>
+<span class="sourceLineNo">870</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.870"></a>
+<span class="sourceLineNo">871</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.871"></a>
+<span class="sourceLineNo">872</span>            mutation));<a name="line.872"></a>
+<span class="sourceLineNo">873</span>      } else if (mutation instanceof Delete) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>        mmrBuilder.addMutationRequest(<a name="line.874"></a>
+<span class="sourceLineNo">875</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.875"></a>
+<span class="sourceLineNo">876</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.876"></a>
+<span class="sourceLineNo">877</span>                  MutationType.DELETE, mutation));<a name="line.877"></a>
+<span class="sourceLineNo">878</span>      } else {<a name="line.878"></a>
+<span class="sourceLineNo">879</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.879"></a>
+<span class="sourceLineNo">880</span>          + mutation.getClass().getName());<a name="line.880"></a>
+<span class="sourceLineNo">881</span>      }<a name="line.881"></a>
+<span class="sourceLineNo">882</span>    }<a name="line.882"></a>
+<span class="sourceLineNo">883</span><a name="line.883"></a>
+<span class="sourceLineNo">884</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.884"></a>
+<span class="sourceLineNo">885</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.885"></a>
+<span class="sourceLineNo">886</span>    try {<a name="line.886"></a>
+<span class="sourceLineNo">887</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.887"></a>
+<span class="sourceLineNo">888</span>    } catch (ServiceException ex) {<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      ProtobufUtil.toIOException(ex);<a name="line.889"></a>
+<span class="sourceLineNo">890</span>    }<a name="line.890"></a>
+<span class="sourceLineNo">891</span>  }<a name="line.891"></a>
+<span class="sourceLineNo">892</span><a name="line.892"></a>
+<span class="sourceLineNo">893</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.894"></a>
+<span class="sourceLineNo">895</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.895"></a>
+<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
+<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
+<span class="sourceLineNo">898</span>}<a name="line.898"></a>
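A side note on the createRSGroupTable() wait shown above: it polls the master procedure executor with a simple bounded loop, up to 600 tries with a 100 ms sleep (roughly one minute) before giving up. The snippet below is a generic, hypothetical sketch of that same bounded-poll pattern with the completion check abstracted to a BooleanSupplier; the class and method names are invented for illustration and are not part of the committed code.

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    /** Hypothetical sketch of the bounded 600 x 100ms wait used by createRSGroupTable(). */
    public final class BoundedWaitSketch {
      private BoundedWaitSketch() {}

      /**
       * Polls {@code done} up to {@code maxTries} times, sleeping {@code sleepMillis}
       * between tries. Returns true if the condition became true before tries ran out.
       */
      public static boolean await(BooleanSupplier done, int maxTries, long sleepMillis)
          throws InterruptedException {
        for (int tries = maxTries; tries > 0; tries--) {
          if (done.getAsBoolean()) {
            return true;
          }
          TimeUnit.MILLISECONDS.sleep(sleepMillis);
        }
        return done.getAsBoolean();
      }

      public static void main(String[] args) throws InterruptedException {
        // Toy condition: becomes true after ~1.5 seconds, well inside 600 * 100ms.
        long deadline = System.currentTimeMillis() + 1500;
        boolean ok = await(() -> System.currentTimeMillis() >= deadline, 600, 100);
        System.out.println("condition met before timeout: " + ok);
      }
    }

The real code additionally bails out early if the procedure executor stops running, and on timeout raises an IOException ("Failed to create group table in a given time."), which is the caller-visible failure mode of that wait.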
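For context on the multiMutate() helper rendered above: it converts a batch of Puts and Deletes to protobuf and sends them through the MultiRowMutation coprocessor endpoint so the whole batch commits atomically within the region hosting the rsgroup row. Below is a minimal, hypothetical client-side sketch of the same pattern against an ordinary user table. It assumes the target table has the MultiRowMutationEndpoint coprocessor attached (the hbase:rsgroup table configures this in its descriptor); the table name, rows, and column family here are invented for illustration and are not part of the committed code.

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
    import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
    import org.apache.hadoop.hbase.util.Bytes;

    import com.google.protobuf.ServiceException;

    public class MultiRowMutateSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("demo_table"))) {  // hypothetical table
          List<Put> puts = Arrays.asList(
              new Put(Bytes.toBytes("row-1"))
                  .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v1")),
              new Put(Bytes.toBytes("row-2"))
                  .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v2")));

          // Convert the client mutations to protobuf, as multiMutate() does above.
          MultiRowMutationProtos.MutateRowsRequest.Builder builder =
              MultiRowMutationProtos.MutateRowsRequest.newBuilder();
          for (Put put : puts) {
            builder.addMutationRequest(ProtobufUtil.toMutation(MutationType.PUT, put));
          }

          // The channel is keyed by one row; the endpoint in that row's region applies the
          // whole batch atomically, so every mutated row must belong to that same region.
          CoprocessorRpcChannel channel = table.coprocessorService(Bytes.toBytes("row-1"));
          MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =
              MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);
          try {
            service.mutateRows(null, builder.build());
          } catch (ServiceException ex) {
            // Wrap and re-throw so the caller sees the failure.
            throw new IOException(ex);
          }
        }
      }
    }

This is only a sketch of the pattern, not a drop-in utility: for the rsgroup use case all mutations target the single ROW_KEY row of hbase:rsgroup, which is what makes the single-region atomicity guarantee apply trivially.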
 
 
 


[02/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          

<TRUNCATED>
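
As a quick orientation to the HBaseFsck code rendered above, here is a minimal, hypothetical sketch of constructing the tool for a read-only consistency check. It uses only what the excerpt shows -- the HBaseFsck(Configuration) constructor, the connect() call named in the class javadoc, and Closeable -- and the wrapper class name (HbckReadOnlyCheck) is purely illustrative; the actual check entry points sit in the portion of the source elided by <TRUNCATED>.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.HBaseFsck;

    public class HbckReadOnlyCheck {
      public static void main(String[] args) throws Exception {
        // Standard client configuration; hbase-site.xml must be on the classpath.
        Configuration conf = HBaseConfiguration.create();
        // The HBaseFsck(Configuration) constructor builds its own thread pool
        // and ZooKeeper watcher, so a running cluster/quorum is assumed.
        try (HBaseFsck hbck = new HBaseFsck(conf)) {
          // Per the class javadoc, connect() must succeed before any region
          // consistency work; against hbase-2.x, hbck1 may only read state.
          hbck.connect();
          // ... run the desired checks here (entry points are elided above) ...
        }
      }
    }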

[24/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
index ea05301..26a93dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
@@ -269,3590 +269,3574 @@
 <span class="sourceLineNo">261</span>   */<a name="line.261"></a>
 <span class="sourceLineNo">262</span>  protected ClusterConnection clusterConnection;<a name="line.262"></a>
 <span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /*<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Long-living meta table locator, which is created when the server is started and stopped<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * when server shuts down. References to this locator shall be used to perform according<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * operations in EventHandlers. Primary reason for this decision is to make it mockable<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * for tests.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  protected MetaTableLocator metaTableLocator;<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * Go here to get table descriptors.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  protected TableDescriptors tableDescriptors;<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>  // Replication services. If no replication, this handler will be null.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  // Compactions<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public CompactSplit compactSplitThread;<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Map of regions currently being served by this region server. Key is the<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * encoded region name.  All access should be synchronized.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /**<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * and here we really mean DataNode locations.<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  // Leases<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  protected Leases leases;<a name="line.303"></a>
+<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * Go here to get table descriptors.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  protected TableDescriptors tableDescriptors;<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>  // Replication services. If no replication, this handler will be null.<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  // Compactions<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public CompactSplit compactSplitThread;<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * Map of regions currently being served by this region server. Key is the<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * encoded region name.  All access should be synchronized.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.280"></a>
+<span class="sourceLineNo">281</span><a name="line.281"></a>
+<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
+<span class="sourceLineNo">283</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * and here we really mean DataNode locations.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   */<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  // Leases<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  protected Leases leases;<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>  // Instance of the hbase executor executorService.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  protected ExecutorService executorService;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  // If false, the file system has become unavailable<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  protected volatile boolean fsOk;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  protected HFileSystem fs;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected HFileSystem walFs;<a name="line.303"></a>
 <span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>  // Instance of the hbase executor executorService.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  protected ExecutorService executorService;<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  // If false, the file system has become unavailable<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  protected volatile boolean fsOk;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  protected HFileSystem fs;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  protected HFileSystem walFs;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  // Set when a report to the master comes back with a message asking us to<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  // of HRegionServer in isolation.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private volatile boolean stopped = false;<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // debugging and unit tests.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private volatile boolean abortRequested;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  // Default abort timeout is 1200 seconds for safe<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  // Will run this task when abort timeout<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.325"></a>
+<span class="sourceLineNo">305</span>  // Set when a report to the master comes back with a message asking us to<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  // of HRegionServer in isolation.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private volatile boolean stopped = false;<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  // debugging and unit tests.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private volatile boolean abortRequested;<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  // Default abort timeout is 1200 seconds for safe<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  // Will run this task when abort timeout<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  // space regions.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  private boolean stopping = false;<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  volatile boolean killed = false;<a name="line.325"></a>
 <span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.327"></a>
+<span class="sourceLineNo">327</span>  private volatile boolean shutDown = false;<a name="line.327"></a>
 <span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  // space regions.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private boolean stopping = false;<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  volatile boolean killed = false;<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  private volatile boolean shutDown = false;<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>  protected final Configuration conf;<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Path rootDir;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Path walRootDir;<a name="line.340"></a>
+<span class="sourceLineNo">329</span>  protected final Configuration conf;<a name="line.329"></a>
+<span class="sourceLineNo">330</span><a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private Path rootDir;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private Path walRootDir;<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  final int numRetries;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  protected final int threadWakeFrequency;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  protected final int msgInterval;<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  protected final int numRegionsToReport;<a name="line.340"></a>
 <span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  final int numRetries;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  protected final int threadWakeFrequency;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected final int msgInterval;<a name="line.346"></a>
+<span class="sourceLineNo">342</span>  // Stub to do region server status calls against the master.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  RpcClient rpcClient;<a name="line.346"></a>
 <span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  protected final int numRegionsToReport;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  // Stub to do region server status calls against the master.<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  RpcClient rpcClient;<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.357"></a>
+<span class="sourceLineNo">348</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  // into web context.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  protected InfoServer infoServer;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  private JvmPauseMonitor pauseMonitor;<a name="line.357"></a>
 <span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  // into web context.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  protected InfoServer infoServer;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  private JvmPauseMonitor pauseMonitor;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** region server process name */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String REGIONSERVER = "regionserver";<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  MetricsRegionServer metricsRegionServer;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  MetricsTable metricsTable;<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private SpanReceiverHost spanReceiverHost;<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private ChoreService choreService;<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /*<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Check for compactions requests.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  ScheduledChore compactionChecker;<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /*<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * Check for flushes<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  ScheduledChore periodicFlusher;<a name="line.387"></a>
-<span class="sourceLineNo">388</span><a name="line.388"></a>
-<span class="sourceLineNo">389</span>  protected volatile WALFactory walFactory;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // WAL roller. log is protected rather than private to avoid<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // eclipse warning when accessed by inner classes<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected LogRoller walRoller;<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // A thread which calls reportProcedureDone<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // flag set after we're done setting up server threads<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // zookeeper connection and watcher<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  protected final ZKWatcher zooKeeper;<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // master address tracker<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // Cluster Status Tracker<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  // Log Splitting Worker<a name="line.410"></a>
-<span class="sourceLineNo">411</span>  private SplitLogWorker splitLogWorker;<a name="line.411"></a>
+<span class="sourceLineNo">359</span>  /** region server process name */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public static final String REGIONSERVER = "regionserver";<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  MetricsRegionServer metricsRegionServer;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  MetricsTable metricsTable;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>  private SpanReceiverHost spanReceiverHost;<a name="line.364"></a>
+<span class="sourceLineNo">365</span><a name="line.365"></a>
+<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private ChoreService choreService;<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /*<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Check for compactions requests.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  ScheduledChore compactionChecker;<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  /*<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * Check for flushes<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  ScheduledChore periodicFlusher;<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  protected volatile WALFactory walFactory;<a name="line.381"></a>
+<span class="sourceLineNo">382</span><a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // WAL roller. log is protected rather than private to avoid<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // eclipse warning when accessed by inner classes<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected LogRoller walRoller;<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  // A thread which calls reportProcedureDone<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.388"></a>
+<span class="sourceLineNo">389</span><a name="line.389"></a>
+<span class="sourceLineNo">390</span>  // flag set after we're done setting up server threads<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // zookeeper connection and watcher<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  protected final ZKWatcher zooKeeper;<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // master address tracker<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span>  // Cluster Status Tracker<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // Log Splitting Worker<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  private SplitLogWorker splitLogWorker;<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // A sleeper that sleeps for msgInterval.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  protected final Sleeper sleeper;<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private final int operationTimeout;<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  private final int shortOperationTimeout;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.411"></a>
 <span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>  // A sleeper that sleeps for msgInterval.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  protected final Sleeper sleeper;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  private final int operationTimeout;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  private final int shortOperationTimeout;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.419"></a>
+<span class="sourceLineNo">413</span>  // Cache configuration and block cache reference<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  protected CacheConfig cacheConfig;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  // Cache configuration for mob<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  final MobCacheConfig mobCacheConfig;<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** The health check chore. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  private HealthCheckChore healthCheckChore;<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  // Cache configuration and block cache reference<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  protected CacheConfig cacheConfig;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  // Cache configuration for mob<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  final MobCacheConfig mobCacheConfig;<a name="line.424"></a>
+<span class="sourceLineNo">421</span>  /** The nonce manager chore. */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  private ScheduledChore nonceManagerChore;<a name="line.422"></a>
+<span class="sourceLineNo">423</span><a name="line.423"></a>
+<span class="sourceLineNo">424</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.424"></a>
 <span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>  /** The health check chore. */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  private HealthCheckChore healthCheckChore;<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>  /** The nonce manager chore. */<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  private ScheduledChore nonceManagerChore;<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * against  Master.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected ServerName serverName;<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /*<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * hostname specified by hostname config<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  protected String useThisHostnameInstead;<a name="line.444"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * against  Master.<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   */<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  protected ServerName serverName;<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /*<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * hostname specified by hostname config<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  protected String useThisHostnameInstead;<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  // key to the config parameter of server hostname<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  // both master and region server<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.441"></a>
+<span class="sourceLineNo">442</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.444"></a>
 <span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  // key to the config parameter of server hostname<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  // both master and region server<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  // Exception will be thrown if both are used.<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * This servers startcode.<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  protected final long startcode;<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * Unique identifier for the cluster we are a part of.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  protected String clusterId;<a name="line.467"></a>
+<span class="sourceLineNo">446</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  // Exception will be thrown if both are used.<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * This servers startcode.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  protected final long startcode;<a name="line.454"></a>
+<span class="sourceLineNo">455</span><a name="line.455"></a>
+<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
+<span class="sourceLineNo">457</span>   * Unique identifier for the cluster we are a part of.<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   */<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  protected String clusterId;<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  /**<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * Chore to clean periodically the moved region list<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>  // chore for refreshing store files for secondary regions<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  private StorefileRefresherChore storefileRefresher;<a name="line.467"></a>
 <span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Chore to clean periodically the moved region list<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  // chore for refreshing store files for secondary regions<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  private StorefileRefresherChore storefileRefresher;<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  private RegionServerCoprocessorHost rsHost;<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>  /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * HBASE-3787) are:<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   */<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  final ServerNonceManager nonceManager;<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>  private UserProvider userProvider;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected final RSRpcServices rpcServices;<a name="line.506"></a>
+<span class="sourceLineNo">469</span>  private RegionServerCoprocessorHost rsHost;<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /**<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   * HBASE-3787) are:<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  final ServerNonceManager nonceManager;<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  private UserProvider userProvider;<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>  protected final RSRpcServices rpcServices;<a name="line.498"></a>
+<span class="sourceLineNo">499</span><a name="line.499"></a>
+<span class="sourceLineNo">500</span>  protected CoordinatedStateManager csm;<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>  /**<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  protected final ConfigurationManager configurationManager;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  protected CoordinatedStateManager csm;<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   */<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  protected final ConfigurationManager configurationManager;<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.517"></a>
+<span class="sourceLineNo">508</span>  @VisibleForTesting<a name="line.508"></a>
+<span class="sourceLineNo">509</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.509"></a>
+<span class="sourceLineNo">510</span><a name="line.510"></a>
+<span class="sourceLineNo">511</span>  private volatile ThroughputController flushThroughputController;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.513"></a>
+<span class="sourceLineNo">514</span><a name="line.514"></a>
+<span class="sourceLineNo">515</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.517"></a>
 <span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  private volatile ThroughputController flushThroughputController;<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   */<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private final boolean masterless;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  /**<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * Starts a HRegionServer at the default location<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  // Don't start any services or managers in here in the Constructor.<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    super("RegionServer");  // thread name<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TraceUtil.initTracer(conf);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    try {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      this.startcode = System.currentTimeMillis();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      this.conf = conf;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.fsOk = true;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      HFile.checkHFileVersion(this.conf);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      checkCodecs(this.conf);<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.554"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  private final boolean masterless;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>   * Starts a HRegionServer at the default location<a name="line.529"></a>
+<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  // Don't start any services or managers in here in the Constructor.<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    super("RegionServer");  // thread name<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    TraceUtil.initTracer(conf);<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    try {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      this.startcode = System.currentTimeMillis();<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      this.conf = conf;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      this.fsOk = true;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      HFile.checkHFileVersion(this.conf);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      checkCodecs(this.conf);<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.546"></a>
+<span class="sourceLineNo">547</span><a name="line.547"></a>
+<span class="sourceLineNo">548</span>      // Disable usage of meta replicas in the regionserver<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      // Config'ed params<a name="line.550"></a>
+<span class="sourceLineNo">551</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.553"></a>
+<span class="sourceLineNo">554</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.554"></a>
 <span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>      // Disable usage of meta replicas in the regionserver<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      // Config'ed params<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.564"></a>
+<span class="sourceLineNo">556</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.564"></a>
 <span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.567"></a>
+<span class="sourceLineNo">566</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.566"></a>
+<span class="sourceLineNo">567</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.567"></a>
 <span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.571"></a>
-<span class="sourceLineNo">572</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.575"></a>
-<span class="sourceLineNo">576</span><a name="line.576"></a>
-<span class="sourceLineNo">577</span>      this.abortRequested = false;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      this.stopped = false;<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      rpcServices = createRpcServices();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String hostName =<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.583"></a>
-<span class="sourceLineNo">584</span>              : this.useThisHostnameInstead;<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // login the zookeeper client principal (if using security)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      // login the server principal (if using secure Hadoop)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      login(userProvider, hostName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
-<span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
-<span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
-<span class="sourceLineNo">608</span><a name="line.608"></a>
-<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
-<span class="sourceLineNo">633</span><a name="line.633"></a>
-<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
-<span class="sourceLineNo">661</span><a name="line.661"></a>
-<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span><a name="line.733"></a>
-<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
-<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">569</span>      this.abortRequested = false;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      this.stopped = false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span><a name="line.571"></a>
+<span class="sourceLineNo">572</span>      rpcServices = createRpcServices();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      String hostName =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.575"></a>
+<span class="sourceLineNo">576</span>              : this.useThisHostnameInstead;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.579"></a>
+<span class="sourceLineNo">580</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>      // login the zookeeper client principal (if using security)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.583"></a>
+<span class="sourceLineNo">584</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>      // login the server principal (if using secure Hadoop)<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      login(userProvider, hostName);<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      // init superusers and add the server principal (if using security)<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      // or process owner as default super user.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      Superusers.initialize(conf);<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.590"></a>
+<span class="sourceLineNo">591</span><a name="line.591"></a>
+<span class="sourceLineNo">592</span>      boolean isMasterNotCarryTable =<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      // no need to instantiate global block cache when master not carry table<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      if (!isMasterNotCarryTable) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      cacheConfig = new CacheConfig(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.599"></a>
+<span class="sourceLineNo">600</span><a name="line.600"></a>
+<span class="sourceLineNo">601</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        @Override<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      };<a name="line.606"></a>
+<span class="sourceLineNo">607</span><a name="line.607"></a>
+<span class="sourceLineNo">608</span>      initializeFileSystem();<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.609"></a>
+<span class="sourceLineNo">610</span><a name="line.610"></a>
+<span class="sourceLineNo">611</span>      this.configurationManager = new ConfigurationManager();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.612"></a>
+<span class="sourceLineNo">613</span><a name="line.613"></a>
+<span class="sourceLineNo">614</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        // Open connection to zookeeper and set primary watcher<a name="line.616"></a>
+<span class="sourceLineNo">617</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        if (!this.masterless) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          masterAddressTracker.start();<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          clusterStatusTracker.start();<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        } else {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          masterAddressTracker = null;<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          clusterStatusTracker = null;<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        }<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        zooKeeper = null;<a name="line.633"></a>
+<span class="sourceLineNo">634</span>        masterAddressTracker = null;<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        clusterStatusTracker = null;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      this.rpcServices.start(zooKeeper);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      // class HRS. TODO.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      this.choreService = new ChoreService(getName(), true);<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      this.executorService = new ExecutorService(getName());<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      putUpWebUI();<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    } catch (Throwable t) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // cause of failed startup is lost.<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      LOG.error("Failed construction RegionServer", t);<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      throw t;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
+<span class="sourceLineNo">653</span><a name="line.653"></a>
+<span class="sourceLineNo">654</span>  // HMaster should override this method to load the specific config for master<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.659"></a>
+<span class="sourceLineNo">660</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.660"></a>
+<span class="sourceLineNo">661</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.661"></a>
+<span class="sourceLineNo">662</span>        throw new IOException(msg);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>      } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>        return rpcServices.isa.getHostName();<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      return hostname;<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
+<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
+<span class="sourceLineNo">670</span><a name="line.670"></a>
+<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>   * If running on Windows, do windows-specific setup.<a name="line.672"></a>
+<span class="sourceLineNo">673</span>   */<a name="line.673"></a>
+<span class="

<TRUNCATED>

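For orientation between the two diffs, a minimal Java sketch of how the entry points they touch are typically invoked: the ZKUtil.loginClient call and ZKWatcher construction from the HRegionServer constructor above, plus two of the ZKUtil read/watch methods whose javadoc anchors shift in the diff below. This is not part of the commit; the hostname, znode path, watcher identifier, and the null Abortable are illustrative assumptions only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.zookeeper.ZKUtil;
    import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

    public class ZkUtilSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Log in the ZooKeeper client principal if security is configured
        // (a no-op on insecure clusters), as the constructor code above does.
        // The hostname string is an illustrative placeholder.
        ZKUtil.loginClient(conf, HConstants.ZK_CLIENT_KEYTAB_FILE,
            HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, "rs-host.example.com");
        // Open a watcher; identifier and the null Abortable are illustrative,
        // and canCreateBaseZNode=false avoids creating base znodes.
        try (ZKWatcher zkw = new ZKWatcher(conf, "zkutil-sketch", null, false)) {
          String znode = "/hbase/example";
          // Sets a watch on the znode and reports whether it currently exists.
          boolean exists = ZKUtil.watchAndCheckExists(zkw, znode);
          // Reads the znode data (if any) and leaves a watch set.
          byte[] data = exists ? ZKUtil.getDataAndWatch(zkw, znode) : null;
          System.out.println("exists=" + exists
              + ", bytes=" + (data == null ? 0 : data.length));
        }
      }
    }
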
[34/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
index 5832df4..2d5212e 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.91">ZKUtil</a>
+public final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.90">ZKUtil</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Internal HBase utility class for ZooKeeper.
 
@@ -738,7 +738,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.92">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.91">LOG</a></pre>
 </li>
 </ul>
 <a name="zkDumpConnectionTimeOut">
@@ -747,7 +747,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>zkDumpConnectionTimeOut</h4>
-<pre>private static&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.94">zkDumpConnectionTimeOut</a></pre>
+<pre>private static&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.93">zkDumpConnectionTimeOut</a></pre>
 </li>
 </ul>
 </li>
@@ -764,7 +764,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ZKUtil</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.96">ZKUtil</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.95">ZKUtil</a>()</pre>
 </li>
 </ul>
 </li>
@@ -781,7 +781,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>connect</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html" title="class in org.apache.hadoop.hbase.zookeeper">RecoverableZooKeeper</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.110">connect</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html" title="class in org.apache.hadoop.hbase.zookeeper">RecoverableZooKeeper</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.109">connect</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                            org.apache.zookeeper.Watcher&nbsp;watcher)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Creates a new connection to ZooKeeper, pulling settings and ensemble config
@@ -805,7 +805,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>connect</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html" title="class in org.apache.hadoop.hbase.zookeeper">RecoverableZooKeeper</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.116">connect</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html" title="class in org.apache.hadoop.hbase.zookeeper">RecoverableZooKeeper</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.115">connect</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ensemble,
                                            org.apache.zookeeper.Watcher&nbsp;watcher)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -821,7 +821,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>connect</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html" title="class in org.apache.hadoop.hbase.zookeeper">RecoverableZooKeeper</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.122">connect</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.html" title="class in org.apache.hadoop.hbase.zookeeper">RecoverableZooKeeper</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.121">connect</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;ensemble,
                                            org.apache.zookeeper.Watcher&nbsp;watcher,
                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;identifier)
@@ -838,7 +838,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>loginServer</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.157">loginServer</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.156">loginServer</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;keytabFileKey,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;userNameKey,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname)
@@ -866,7 +866,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>loginClient</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.178">loginClient</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.177">loginClient</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;keytabFileKey,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;userNameKey,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname)
@@ -894,7 +894,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>login</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.201">login</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.200">login</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;keytabFileKey,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;userNameKey,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname,
@@ -926,7 +926,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getParent</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.321">getParent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.320">getParent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)</pre>
 <div class="block">Returns the full path of the immediate parent of the specified node.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -942,7 +942,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNodeName</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.331">getNodeName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.330">getNodeName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 <div class="block">Get the name of the current node from the specified fully-qualified path.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -958,7 +958,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>watchAndCheckExists</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.349">watchAndCheckExists</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.348">watchAndCheckExists</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                    throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Watch the specified znode for delete/create/change events.  The watcher is
@@ -981,7 +981,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setWatchIfNodeExists</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.381">setWatchIfNodeExists</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.380">setWatchIfNodeExists</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                     throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Watch the specified znode, but only if exists. Useful when watching
@@ -1005,7 +1005,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkExists</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.403">checkExists</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.402">checkExists</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                        throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Check if the specified node exists.  Sets no watches.</div>
@@ -1026,7 +1026,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>listChildrenAndWatchForNewChildren</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.439">listChildrenAndWatchForNewChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.438">listChildrenAndWatchForNewChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                                        throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Lists the children znodes of the specified znode.  Also sets a watch on
@@ -1055,7 +1055,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>listChildrenAndWatchThem</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.469">listChildrenAndWatchThem</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.468">listChildrenAndWatchThem</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                              throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">List all the children of the specified znode, setting a watch for children
@@ -1078,7 +1078,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>listChildrenNoWatch</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.495">listChildrenNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.494">listChildrenNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                         throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Lists the children of the specified znode without setting any watches.
@@ -1105,7 +1105,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>nodeHasChildren</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.552">nodeHasChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.551">nodeHasChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Checks if the specified znode has any children.  Sets no watches.
@@ -1134,7 +1134,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumberOfChildren</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.584">getNumberOfChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.583">getNumberOfChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Get the number of children of the specified node.
@@ -1160,7 +1160,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.607">getData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.606">getData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                       throws org.apache.zookeeper.KeeperException,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -1180,7 +1180,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataAndWatch</h4>
-<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.635">getDataAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.634">getDataAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                               throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Get the data at the specified znode and set a watch.
@@ -1204,7 +1204,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataAndWatch</h4>
-<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.652">getDataAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.651">getDataAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                      org.apache.zookeeper.data.Stat&nbsp;stat)
                               throws org.apache.zookeeper.KeeperException</pre>
@@ -1230,7 +1230,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataInternal</h4>
-<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.657">getDataInternal</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.656">getDataInternal</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                       org.apache.zookeeper.data.Stat&nbsp;stat,
                                       boolean&nbsp;watcherSet)
@@ -1247,7 +1247,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataNoWatch</h4>
-<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.696">getDataNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.695">getDataNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                     org.apache.zookeeper.data.Stat&nbsp;stat)
                              throws org.apache.zookeeper.KeeperException</pre>
@@ -1277,7 +1277,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <li class="blockList">
 <h4>getChildDataAndWatchForNewChildren</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.NodeAndData</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.736">getChildDataAndWatchForNewChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.NodeAndData</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.735">getChildDataAndWatchForNewChildren</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;baseNode)
                                                                                throws org.apache.zookeeper.KeeperException</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">Unused</span></div>
@@ -1308,7 +1308,7 @@ public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/
 <li class="blockList">
 <h4>updateExistingNodeData</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.769">updateExistingNodeData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.768">updateExistingNodeData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                                       byte[]&nbsp;data,
                                                       int&nbsp;expectedVersion)
@@ -1338,7 +1338,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>setData</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.802">setData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.801">setData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                               byte[]&nbsp;data,
                               int&nbsp;expectedVersion)
@@ -1375,7 +1375,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createSetData</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.822">createSetData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.821">createSetData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                  byte[]&nbsp;data)
                           throws org.apache.zookeeper.KeeperException</pre>
@@ -1397,7 +1397,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>setData</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.847">setData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.846">setData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                            byte[]&nbsp;data)
                     throws org.apache.zookeeper.KeeperException,
@@ -1428,7 +1428,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>setData</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.852">setData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.851">setData</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                             <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp.SetData</a>&nbsp;setData)
                      throws org.apache.zookeeper.KeeperException,
                             org.apache.zookeeper.KeeperException.NoNodeException</pre>
@@ -1445,7 +1445,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>isSecureZooKeeper</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.863">isSecureZooKeeper</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.862">isSecureZooKeeper</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Returns whether or not secure authentication is enabled
  (whether <code>hbase.security.authentication</code> is set to
  <code>kerberos</code>.</div>
@@ -1457,7 +1457,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createACL</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;org.apache.zookeeper.data.ACL&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.888">createACL</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;org.apache.zookeeper.data.ACL&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.887">createACL</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)</pre>
 </li>
 </ul>
@@ -1467,7 +1467,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createACL</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;org.apache.zookeeper.data.ACL&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.892">createACL</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;org.apache.zookeeper.data.ACL&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.891">createACL</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
                                                                  boolean&nbsp;isSecureZooKeeper)</pre>
 </li>
@@ -1478,7 +1478,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createEphemeralNodeAndWatch</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.960">createEphemeralNodeAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.959">createEphemeralNodeAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                                   byte[]&nbsp;data)
                                            throws org.apache.zookeeper.KeeperException</pre>
@@ -1509,7 +1509,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createNodeIfNotExistsAndWatch</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1000">createNodeIfNotExistsAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.999">createNodeIfNotExistsAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                                     byte[]&nbsp;data)
                                              throws org.apache.zookeeper.KeeperException</pre>
@@ -1543,7 +1543,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createNodeIfNotExistsNoWatch</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1036">createNodeIfNotExistsNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1035">createNodeIfNotExistsNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                                   byte[]&nbsp;data,
                                                   org.apache.zookeeper.CreateMode&nbsp;createMode)
@@ -1572,7 +1572,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createAndWatch</h4>
-<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1067">createAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1066">createAndWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                  byte[]&nbsp;data)
                           throws org.apache.zookeeper.KeeperException,
@@ -1603,7 +1603,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>asyncCreate</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1100">asyncCreate</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1099">asyncCreate</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                byte[]&nbsp;data,
                                org.apache.zookeeper.AsyncCallback.StringCallback&nbsp;cb,
@@ -1629,7 +1629,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createAndFailSilent</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1117">createAndFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1116">createAndFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                 throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Creates the specified node, iff the node does not exist.  Does not set a
@@ -1651,7 +1651,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createAndFailSilent</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1133">createAndFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1132">createAndFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                        byte[]&nbsp;data)
                                 throws org.apache.zookeeper.KeeperException</pre>
@@ -1675,7 +1675,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createAndFailSilent</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1140">createAndFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1139">createAndFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                         <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp.CreateAndFailSilent</a>&nbsp;cafs)
                                  throws org.apache.zookeeper.KeeperException</pre>
 <dl>
@@ -1690,7 +1690,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createWithParents</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1175">createWithParents</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1174">createWithParents</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                               throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Creates the specified node and all parent nodes required for it to exist.
@@ -1713,7 +1713,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>createWithParents</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1193">createWithParents</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1192">createWithParents</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                      byte[]&nbsp;data)
                               throws org.apache.zookeeper.KeeperException</pre>
@@ -1739,7 +1739,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNode</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1218">deleteNode</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1217">deleteNode</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)
                        throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Delete the specified node.  Sets no watches.  Throws all exceptions.</div>
@@ -1755,7 +1755,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNode</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1227">deleteNode</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1226">deleteNode</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
                                  int&nbsp;version)
                           throws org.apache.zookeeper.KeeperException</pre>
@@ -1773,7 +1773,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNodeFailSilent</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1248">deleteNodeFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1247">deleteNodeFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)
                                  throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Deletes the specified node.  Fails silent if the node does not exist.</div>
@@ -1792,7 +1792,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNodeFailSilent</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1254">deleteNodeFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1253">deleteNodeFailSilent</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                          <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp.DeleteNodeFailSilent</a>&nbsp;dnfs)
                                   throws org.apache.zookeeper.KeeperException</pre>
 <dl>
@@ -1807,7 +1807,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNodeRecursively</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1274">deleteNodeRecursively</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1273">deleteNodeRecursively</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)
                                   throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Delete the specified node and all of it's children.
@@ -1828,7 +1828,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteChildrenRecursively</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1287">deleteChildrenRecursively</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1286">deleteChildrenRecursively</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)
                                       throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Delete all the children of the specified node but not the node itself.
@@ -1847,7 +1847,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteChildrenRecursivelyMultiOrSequential</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1322">deleteChildrenRecursivelyMultiOrSequential</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1321">deleteChildrenRecursivelyMultiOrSequential</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                               boolean&nbsp;runSequentialOnMultiFailure,
                                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>...&nbsp;pathRoots)
                                                        throws org.apache.zookeeper.KeeperException</pre>
@@ -1884,7 +1884,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNodeRecursivelyMultiOrSequential</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1373">deleteNodeRecursivelyMultiOrSequential</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1372">deleteNodeRecursivelyMultiOrSequential</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                           boolean&nbsp;runSequentialOnMultiFailure,
                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>...&nbsp;pathRoots)
                                                    throws org.apache.zookeeper.KeeperException</pre>
@@ -1921,7 +1921,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>listChildrenBFSNoWatch</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1415">listChildrenBFSNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1414">listChildrenBFSNoWatch</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                             throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">BFS Traversal of all the children under path, with the entries in the list,
@@ -1944,7 +1944,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>listChildrenBFSAndWatchThem</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1451">listChildrenBFSAndWatchThem</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1450">listChildrenBFSAndWatchThem</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)
                                                  throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">BFS Traversal of all the children under path, with the entries in the list,
@@ -1967,7 +1967,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>toZooKeeperOp</h4>
-<pre>private static&nbsp;org.apache.zookeeper.Op&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1636">toZooKeeperOp</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;org.apache.zookeeper.Op&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1635">toZooKeeperOp</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                                      <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;op)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/UnsupportedOperationException.html?is-external=true" title="class or interface in java.lang">UnsupportedOperationException</a></pre>
 <div class="block">Convert from ZKUtilOp to ZKOp</div>
@@ -1983,7 +1983,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>multiOrSequential</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1676">multiOrSequential</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1675">multiOrSequential</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&gt;&nbsp;ops,
                                      boolean&nbsp;runSequentialOnMultiFailure)
                               throws org.apache.zookeeper.KeeperException</pre>
@@ -2014,7 +2014,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>processSequentially</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1714">processSequentially</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1713">processSequentially</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&gt;&nbsp;ops)
                                  throws org.apache.zookeeper.KeeperException,
                                         org.apache.zookeeper.KeeperException.NoNodeException</pre>
@@ -2031,7 +2031,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>dump</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1735">dump</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1734">dump</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>String dump of everything in ZooKeeper.</dd>
@@ -2044,7 +2044,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicationZnodesDump</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1801">getReplicationZnodesDump</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1800">getReplicationZnodesDump</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true" title="class or interface in java.lang">StringBuilder</a>&nbsp;sb)
                                       throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Appends replication znodes to the passed StringBuilder.</div>
@@ -2063,7 +2063,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>appendHFileRefsZnodes</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1824">appendHFileRefsZnodes</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1823">appendHFileRefsZnodes</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hfileRefsZnode,
                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true" title="class or interface in java.lang">StringBuilder</a>&nbsp;sb)
                                    throws org.apache.zookeeper.KeeperException</pre>
@@ -2079,7 +2079,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicationZnodesDump</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1846">getReplicationZnodesDump</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1845">getReplicationZnodesDump</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)
                                        throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Returns a string with replication znodes and position of the replication log</div>
 <dl>
@@ -2098,7 +2098,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>appendRSZnodes</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1852">appendRSZnodes</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1851">appendRSZnodes</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true" title="class or interface in java.lang">StringBuilder</a>&nbsp;sb)
                             throws org.apache.zookeeper.KeeperException</pre>
@@ -2114,7 +2114,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>appendPeersZnodes</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1883">appendPeersZnodes</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1882">appendPeersZnodes</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;peersZnode,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true" title="class or interface in java.lang">StringBuilder</a>&nbsp;sb)
                                throws org.apache.zookeeper.KeeperException</pre>
@@ -2130,7 +2130,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>appendPeerState</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1911">appendPeerState</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1910">appendPeerState</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znodeToProcess,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true" title="class or interface in java.lang">StringBuilder</a>&nbsp;sb)
                              throws org.apache.zookeeper.KeeperException,
@@ -2148,7 +2148,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerStats</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1947">getServerStats</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;server,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1946">getServerStats</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;server,
                                       int&nbsp;timeout)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Gets the statistics from the given server.</div>
@@ -2169,7 +2169,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>logRetrievedMsg</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1983">logRetrievedMsg</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1982">logRetrievedMsg</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                                     byte[]&nbsp;data,
                                     boolean&nbsp;watcherSet)</pre>
@@ -2181,7 +2181,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerNameOrEmptyString</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2000">getServerNameOrEmptyString</a>(byte[]&nbsp;data)</pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1999">getServerNameOrEmptyString</a>(byte[]&nbsp;data)</pre>
 </li>
 </ul>
 <a name="waitForBaseZNode-org.apache.hadoop.conf.Configuration-">
@@ -2190,7 +2190,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForBaseZNode</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2012">waitForBaseZNode</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2011">waitForBaseZNode</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Waits for HBase installation's base (parent) znode to become available.</div>
 <dl>
@@ -2205,7 +2205,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>convert</h4>
-<pre>public static&nbsp;org.apache.zookeeper.KeeperException&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2056">convert</a>(<a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions">DeserializationException</a>&nbsp;e)</pre>
+<pre>public static&nbsp;org.apache.zookeeper.KeeperException&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2055">convert</a>(<a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions">DeserializationException</a>&nbsp;e)</pre>
 <div class="block">Convert a <a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions"><code>DeserializationException</code></a> to a more palatable <code>KeeperException</code>.
 Used when we can't let a <a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions"><code>DeserializationException</code></a> out w/o changing the public API.</div>
 <dl>
@@ -2222,7 +2222,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>logZKTree</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2066">logZKTree</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2065">logZKTree</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;root)</pre>
 <div class="block">Recursively print the current state of ZK (non-transactional)</div>
 <dl>
@@ -2237,7 +2237,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>logZKTree</h4>
-<pre>protected static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2086">logZKTree</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>protected static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2085">logZKTree</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;root,
                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;prefix)
                          throws org.apache.zookeeper.KeeperException</pre>
@@ -2256,7 +2256,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockList">
 <li class="blockList">
 <h4>positionToByteArray</h4>
-<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2106">positionToByteArray</a>(long&nbsp;position)</pre>
+<pre>public static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2105">positionToByteArray</a>(long&nbsp;position)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>position</code> - the position to serialize</dd>
@@ -2272,7 +2272,7 @@ public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/
 <ul class="blockListLast">
 <li class="blockList">
 <h4>parseWALPositionFrom</h4>
-<pre>public static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2117">parseWALPositionFrom</a>(byte[]&nbsp;bytes)
+<pre>public static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.2116">parseWALPositionFrom</a>(byte[]&nbsp;bytes)
                                  throws <a href="../../../../../org/apache/hadoop/hbase/exceptions/DeserializationException.html" title="class in org.apache.hadoop.hbase.exceptions">DeserializationException</a></pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>

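The hunks above only shift source-line anchors for ZKUtil's create/delete helpers, but the signatures they show are enough to sketch how the helpers compose. The snippet below is an illustrative sketch only, not part of this patch: it assumes an already-constructed ZKWatcher and uses a made-up znode path.

    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.zookeeper.ZKUtil;
    import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
    import org.apache.zookeeper.KeeperException;

    public class ZKUtilSketch {
      // Sketch only: "/hbase/example-parent" is a hypothetical path and zkw is assumed
      // to be a live ZKWatcher obtained elsewhere.
      static void roundTrip(ZKWatcher zkw) throws KeeperException {
        // Persistent znode, creating any missing parents along the way.
        ZKUtil.createWithParents(zkw, "/hbase/example-parent");
        // Ephemeral child; returns false if the node already existed (a watch is set either way).
        boolean created = ZKUtil.createEphemeralNodeAndWatch(
            zkw, "/hbase/example-parent/member", Bytes.toBytes("state"));
        if (!created) {
          // Another process already owns the ephemeral node.
        }
        // Remove the node and all of its children when done.
        ZKUtil.deleteNodeRecursively(zkw, "/hbase/example-parent");
      }
    }

All three helpers declare throws KeeperException, matching the signatures above, so the sketch simply propagates it.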

[05/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()),

<TRUNCATED>
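The truncated hunk above shows HBaseFsck building its lock-file retry schedule from plain configuration keys ("hbase.hbck.lockfile.attempts", "hbase.hbck.lockfile.attempt.sleep.interval", "hbase.hbck.lockfile.attempt.maxsleeptime", with defaults of 5 attempts, 200 ms and 5000 ms). The short sketch below mirrors that pattern without any HBase or Hadoop dependency; the RetrySchedule class, the fromProperties helper and the capped exponential backoff are illustrative assumptions, not the actual RetryCounterFactory implementation.

// Illustrative sketch only: mirrors how the hbck lock-file retry schedule is
// derived from configuration keys. RetrySchedule and fromProperties are
// hypothetical names; the capped exponential backoff is an assumption.
public final class RetryScheduleSketch {

  static final class RetrySchedule {
    final int maxAttempts;
    final long sleepIntervalMillis;
    final long maxSleepMillis;

    RetrySchedule(int maxAttempts, long sleepIntervalMillis, long maxSleepMillis) {
      this.maxAttempts = maxAttempts;
      this.sleepIntervalMillis = sleepIntervalMillis;
      this.maxSleepMillis = maxSleepMillis;
    }

    // Sleep before the given retry attempt: doubles each time, capped at maxSleepMillis.
    long sleepBeforeAttempt(int attempt) {
      long sleep = sleepIntervalMillis << Math.min(attempt, 20);
      return Math.min(sleep, maxSleepMillis);
    }
  }

  // Reads the same keys and defaults that appear in the diff above.
  static RetrySchedule fromProperties(java.util.Properties conf) {
    return new RetrySchedule(
        Integer.parseInt(conf.getProperty("hbase.hbck.lockfile.attempts", "5")),
        Long.parseLong(conf.getProperty("hbase.hbck.lockfile.attempt.sleep.interval", "200")),
        Long.parseLong(conf.getProperty("hbase.hbck.lockfile.attempt.maxsleeptime", "5000")));
  }

  public static void main(String[] args) {
    RetrySchedule schedule = fromProperties(new java.util.Properties());
    for (int attempt = 0; attempt < schedule.maxAttempts; attempt++) {
      System.out.println("retry " + (attempt + 1) + ": sleep "
          + schedule.sleepBeforeAttempt(attempt) + " ms");
    }
  }
}

The znode-creation retry factory in the same hunk follows the identical pattern with the hbase.hbck.createznode.* keys.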
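checkAndMarkRunningHbck, also shown in the hunk above, wraps the lock-file creation in a FutureTask and bounds the wait with "hbase.hbck.lockfile.maxwaittime" (default 80 seconds) so a hung filesystem call cannot stall the tool. Below is a stripped-down sketch of that bounded-wait pattern; it uses a hypothetical lock file on the local filesystem via java.nio, whereas the real code creates hbase-hbck.lock under the HBase temp directory on HDFS.

import java.io.IOException;
import java.nio.file.*;
import java.util.concurrent.*;

// Illustrative sketch only: the class name and the local-filesystem lock path
// are hypothetical; only the bounded-wait structure mirrors the diff above.
public final class BoundedLockAcquireSketch {

  // Tries to create the lock file exclusively; returns null if another instance holds it.
  static Path createLockFile(Path lockPath) throws IOException {
    try {
      return Files.createFile(lockPath);
    } catch (FileAlreadyExistsException e) {
      return null;
    }
  }

  public static void main(String[] args) throws Exception {
    Path lockPath = Paths.get(System.getProperty("java.io.tmpdir"), "hbase-hbck.lock");
    int timeoutInSeconds = 80; // mirrors DEFAULT_WAIT_FOR_LOCK_TIMEOUT in the diff

    ExecutorService executor = Executors.newFixedThreadPool(1);
    FutureTask<Path> futureTask = new FutureTask<>(() -> createLockFile(lockPath));
    executor.execute(futureTask);

    Path acquired = null;
    try {
      // Bound the wait so a hung create() cannot block the tool forever.
      acquired = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);
    } catch (TimeoutException te) {
      futureTask.cancel(true); // took too long to obtain the lock
    } catch (ExecutionException ee) {
      System.err.println("Lock creation failed: " + ee.getCause());
    } finally {
      executor.shutdownNow();
    }

    if (acquired != null) {
      System.out.println("Acquired " + acquired);
      Files.deleteIfExists(acquired); // release again for this demo
    } else {
      System.out.println("Did not acquire the lock; another hbck may be running.");
    }
  }
}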

[10/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>     

<TRUNCATED>

[37/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
index 397f886..da375bf 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
@@ -2077,11 +2077,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                           int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
@@ -2097,7 +2097,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</code>&nbsp;</td>
 </tr>
 </tbody>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 0661cfe..f9a1d76 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -540,14 +540,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
 </ul>
 </li>
 </ul>


[23/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index ea05301..26a93dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -269,3590 +269,3574 @@
 <span class="sourceLineNo">261</span>   */<a name="line.261"></a>
 <span class="sourceLineNo">262</span>  protected ClusterConnection clusterConnection;<a name="line.262"></a>
 <span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /*<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Long-living meta table locator, which is created when the server is started and stopped<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * when server shuts down. References to this locator shall be used to perform according<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * operations in EventHandlers. Primary reason for this decision is to make it mockable<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * for tests.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  protected MetaTableLocator metaTableLocator;<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * Go here to get table descriptors.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  protected TableDescriptors tableDescriptors;<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>  // Replication services. If no replication, this handler will be null.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  // Compactions<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public CompactSplit compactSplitThread;<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Map of regions currently being served by this region server. Key is the<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * encoded region name.  All access should be synchronized.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /**<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * and here we really mean DataNode locations.<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  // Leases<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  protected Leases leases;<a name="line.303"></a>
+<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * Go here to get table descriptors.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  protected TableDescriptors tableDescriptors;<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>  // Replication services. If no replication, this handler will be null.<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  // Compactions<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public CompactSplit compactSplitThread;<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * Map of regions currently being served by this region server. Key is the<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * encoded region name.  All access should be synchronized.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.280"></a>
+<span class="sourceLineNo">281</span><a name="line.281"></a>
+<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
+<span class="sourceLineNo">283</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * and here we really mean DataNode locations.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   */<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  // Leases<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  protected Leases leases;<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>  // Instance of the hbase executor executorService.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  protected ExecutorService executorService;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  // If false, the file system has become unavailable<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  protected volatile boolean fsOk;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  protected HFileSystem fs;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected HFileSystem walFs;<a name="line.303"></a>
 <span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>  // Instance of the hbase executor executorService.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  protected ExecutorService executorService;<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  // If false, the file system has become unavailable<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  protected volatile boolean fsOk;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  protected HFileSystem fs;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  protected HFileSystem walFs;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  // Set when a report to the master comes back with a message asking us to<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  // of HRegionServer in isolation.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private volatile boolean stopped = false;<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // debugging and unit tests.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private volatile boolean abortRequested;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  // Default abort timeout is 1200 seconds for safe<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  // Will run this task when abort timeout<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.325"></a>
+<span class="sourceLineNo">305</span>  // Set when a report to the master comes back with a message asking us to<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  // of HRegionServer in isolation.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private volatile boolean stopped = false;<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  // debugging and unit tests.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private volatile boolean abortRequested;<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  // Default abort timeout is 1200 seconds for safe<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  // Will run this task when abort timeout<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  // space regions.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  private boolean stopping = false;<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  volatile boolean killed = false;<a name="line.325"></a>
 <span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.327"></a>
+<span class="sourceLineNo">327</span>  private volatile boolean shutDown = false;<a name="line.327"></a>
 <span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  // space regions.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private boolean stopping = false;<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  volatile boolean killed = false;<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  private volatile boolean shutDown = false;<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>  protected final Configuration conf;<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Path rootDir;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Path walRootDir;<a name="line.340"></a>
+<span class="sourceLineNo">329</span>  protected final Configuration conf;<a name="line.329"></a>
+<span class="sourceLineNo">330</span><a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private Path rootDir;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private Path walRootDir;<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  final int numRetries;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  protected final int threadWakeFrequency;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  protected final int msgInterval;<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  protected final int numRegionsToReport;<a name="line.340"></a>
 <span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  final int numRetries;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  protected final int threadWakeFrequency;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected final int msgInterval;<a name="line.346"></a>
+<span class="sourceLineNo">342</span>  // Stub to do region server status calls against the master.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  RpcClient rpcClient;<a name="line.346"></a>
 <span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  protected final int numRegionsToReport;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  // Stub to do region server status calls against the master.<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  RpcClient rpcClient;<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.357"></a>
+<span class="sourceLineNo">348</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  // into web context.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  protected InfoServer infoServer;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  private JvmPauseMonitor pauseMonitor;<a name="line.357"></a>
 <span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  // into web context.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  protected InfoServer infoServer;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  private JvmPauseMonitor pauseMonitor;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** region server process name */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String REGIONSERVER = "regionserver";<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  MetricsRegionServer metricsRegionServer;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  MetricsTable metricsTable;<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private SpanReceiverHost spanReceiverHost;<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private ChoreService choreService;<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /*<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Check for compactions requests.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  ScheduledChore compactionChecker;<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /*<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * Check for flushes<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  ScheduledChore periodicFlusher;<a name="line.387"></a>
-<span class="sourceLineNo">388</span><a name="line.388"></a>
-<span class="sourceLineNo">389</span>  protected volatile WALFactory walFactory;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // WAL roller. log is protected rather than private to avoid<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // eclipse warning when accessed by inner classes<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected LogRoller walRoller;<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // A thread which calls reportProcedureDone<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // flag set after we're done setting up server threads<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // zookeeper connection and watcher<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  protected final ZKWatcher zooKeeper;<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // master address tracker<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // Cluster Status Tracker<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  // Log Splitting Worker<a name="line.410"></a>
-<span class="sourceLineNo">411</span>  private SplitLogWorker splitLogWorker;<a name="line.411"></a>
+<span class="sourceLineNo">359</span>  /** region server process name */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public static final String REGIONSERVER = "regionserver";<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  MetricsRegionServer metricsRegionServer;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  MetricsTable metricsTable;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>  private SpanReceiverHost spanReceiverHost;<a name="line.364"></a>
+<span class="sourceLineNo">365</span><a name="line.365"></a>
+<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private ChoreService choreService;<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /*<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Check for compactions requests.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  ScheduledChore compactionChecker;<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  /*<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * Check for flushes<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  ScheduledChore periodicFlusher;<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  protected volatile WALFactory walFactory;<a name="line.381"></a>
+<span class="sourceLineNo">382</span><a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // WAL roller. log is protected rather than private to avoid<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // eclipse warning when accessed by inner classes<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected LogRoller walRoller;<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  // A thread which calls reportProcedureDone<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.388"></a>
+<span class="sourceLineNo">389</span><a name="line.389"></a>
+<span class="sourceLineNo">390</span>  // flag set after we're done setting up server threads<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // zookeeper connection and watcher<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  protected final ZKWatcher zooKeeper;<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // master address tracker<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span>  // Cluster Status Tracker<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // Log Splitting Worker<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  private SplitLogWorker splitLogWorker;<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // A sleeper that sleeps for msgInterval.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  protected final Sleeper sleeper;<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private final int operationTimeout;<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  private final int shortOperationTimeout;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.411"></a>
 <span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>  // A sleeper that sleeps for msgInterval.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  protected final Sleeper sleeper;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  private final int operationTimeout;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  private final int shortOperationTimeout;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.419"></a>
+<span class="sourceLineNo">413</span>  // Cache configuration and block cache reference<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  protected CacheConfig cacheConfig;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  // Cache configuration for mob<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  final MobCacheConfig mobCacheConfig;<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** The health check chore. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  private HealthCheckChore healthCheckChore;<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  // Cache configuration and block cache reference<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  protected CacheConfig cacheConfig;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  // Cache configuration for mob<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  final MobCacheConfig mobCacheConfig;<a name="line.424"></a>
+<span class="sourceLineNo">421</span>  /** The nonce manager chore. */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  private ScheduledChore nonceManagerChore;<a name="line.422"></a>
+<span class="sourceLineNo">423</span><a name="line.423"></a>
+<span class="sourceLineNo">424</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.424"></a>
 <span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>  /** The health check chore. */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  private HealthCheckChore healthCheckChore;<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>  /** The nonce manager chore. */<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  private ScheduledChore nonceManagerChore;<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * against  Master.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected ServerName serverName;<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /*<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * hostname specified by hostname config<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  protected String useThisHostnameInstead;<a name="line.444"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * against  Master.<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   */<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  protected ServerName serverName;<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /*<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * hostname specified by hostname config<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  protected String useThisHostnameInstead;<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  // key to the config parameter of server hostname<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  // both master and region server<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.441"></a>
+<span class="sourceLineNo">442</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.444"></a>
 <span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  // key to the config parameter of server hostname<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  // both master and region server<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  // Exception will be thrown if both are used.<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * This servers startcode.<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  protected final long startcode;<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * Unique identifier for the cluster we are a part of.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  protected String clusterId;<a name="line.467"></a>
+<span class="sourceLineNo">446</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  // Exception will be thrown if both are used.<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * This servers startcode.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  protected final long startcode;<a name="line.454"></a>
+<span class="sourceLineNo">455</span><a name="line.455"></a>
+<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
+<span class="sourceLineNo">457</span>   * Unique identifier for the cluster we are a part of.<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   */<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  protected String clusterId;<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  /**<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * Chore to clean periodically the moved region list<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>  // chore for refreshing store files for secondary regions<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  private StorefileRefresherChore storefileRefresher;<a name="line.467"></a>
 <span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Chore to clean periodically the moved region list<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  // chore for refreshing store files for secondary regions<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  private StorefileRefresherChore storefileRefresher;<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  private RegionServerCoprocessorHost rsHost;<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>  /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * HBASE-3787) are:<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   */<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  final ServerNonceManager nonceManager;<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>  private UserProvider userProvider;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected final RSRpcServices rpcServices;<a name="line.506"></a>
+<span class="sourceLineNo">469</span>  private RegionServerCoprocessorHost rsHost;<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /**<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   * HBASE-3787) are:<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  final ServerNonceManager nonceManager;<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  private UserProvider userProvider;<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>  protected final RSRpcServices rpcServices;<a name="line.498"></a>
+<span class="sourceLineNo">499</span><a name="line.499"></a>
+<span class="sourceLineNo">500</span>  protected CoordinatedStateManager csm;<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>  /**<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  protected final ConfigurationManager configurationManager;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  protected CoordinatedStateManager csm;<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   */<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  protected final ConfigurationManager configurationManager;<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.517"></a>
+<span class="sourceLineNo">508</span>  @VisibleForTesting<a name="line.508"></a>
+<span class="sourceLineNo">509</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.509"></a>
+<span class="sourceLineNo">510</span><a name="line.510"></a>
+<span class="sourceLineNo">511</span>  private volatile ThroughputController flushThroughputController;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.513"></a>
+<span class="sourceLineNo">514</span><a name="line.514"></a>
+<span class="sourceLineNo">515</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.517"></a>
 <span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  private volatile ThroughputController flushThroughputController;<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   */<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private final boolean masterless;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  /**<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * Starts a HRegionServer at the default location<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  // Don't start any services or managers in here in the Constructor.<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    super("RegionServer");  // thread name<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TraceUtil.initTracer(conf);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    try {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      this.startcode = System.currentTimeMillis();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      this.conf = conf;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.fsOk = true;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      HFile.checkHFileVersion(this.conf);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      checkCodecs(this.conf);<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.554"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  private final boolean masterless;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>   * Starts a HRegionServer at the default location<a name="line.529"></a>
+<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  // Don't start any services or managers in here in the Constructor.<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    super("RegionServer");  // thread name<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    TraceUtil.initTracer(conf);<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    try {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      this.startcode = System.currentTimeMillis();<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      this.conf = conf;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      this.fsOk = true;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      HFile.checkHFileVersion(this.conf);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      checkCodecs(this.conf);<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.546"></a>
+<span class="sourceLineNo">547</span><a name="line.547"></a>
+<span class="sourceLineNo">548</span>      // Disable usage of meta replicas in the regionserver<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      // Config'ed params<a name="line.550"></a>
+<span class="sourceLineNo">551</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.553"></a>
+<span class="sourceLineNo">554</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.554"></a>
 <span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>      // Disable usage of meta replicas in the regionserver<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      // Config'ed params<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.564"></a>
+<span class="sourceLineNo">556</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.564"></a>
 <span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.567"></a>
+<span class="sourceLineNo">566</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.566"></a>
+<span class="sourceLineNo">567</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.567"></a>
 <span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.571"></a>
-<span class="sourceLineNo">572</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.575"></a>
-<span class="sourceLineNo">576</span><a name="line.576"></a>
-<span class="sourceLineNo">577</span>      this.abortRequested = false;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      this.stopped = false;<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      rpcServices = createRpcServices();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String hostName =<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.583"></a>
-<span class="sourceLineNo">584</span>              : this.useThisHostnameInstead;<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // login the zookeeper client principal (if using security)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      // login the server principal (if using secure Hadoop)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      login(userProvider, hostName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
-<span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
-<span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
-<span class="sourceLineNo">608</span><a name="line.608"></a>
-<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
-<span class="sourceLineNo">633</span><a name="line.633"></a>
-<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
-<span class="sourceLineNo">661</span><a name="line.661"></a>
-<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span><a name="line.733"></a>
-<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
-<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">569</span>      this.abortRequested = false;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      this.stopped = false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span><a name="line.571"></a>
+<span class="sourceLineNo">572</span>      rpcServices = createRpcServices();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      String hostName =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.575"></a>
+<span class="sourceLineNo">576</span>              : this.useThisHostnameInstead;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.579"></a>
+<span class="sourceLineNo">580</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>      // login the zookeeper client principal (if using security)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.583"></a>
+<span class="sourceLineNo">584</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>      // login the server principal (if using secure Hadoop)<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      login(userProvider, hostName);<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      // init superusers and add the server principal (if using security)<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      // or process owner as default super user.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      Superusers.initialize(conf);<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.590"></a>
+<span class="sourceLineNo">591</span><a name="line.591"></a>
+<span class="sourceLineNo">592</span>      boolean isMasterNotCarryTable =<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      // no need to instantiate global block cache when master not carry table<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      if (!isMasterNotCarryTable) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      cacheConfig = new CacheConfig(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.599"></a>
+<span class="sourceLineNo">600</span><a name="line.600"></a>
+<span class="sourceLineNo">601</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        @Override<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      };<a name="line.606"></a>
+<span class="sourceLineNo">607</span><a name="line.607"></a>
+<span class="sourceLineNo">608</span>      initializeFileSystem();<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.609"></a>
+<span class="sourceLineNo">610</span><a name="line.610"></a>
+<span class="sourceLineNo">611</span>      this.configurationManager = new ConfigurationManager();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.612"></a>
+<span class="sourceLineNo">613</span><a name="line.613"></a>
+<span class="sourceLineNo">614</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        // Open connection to zookeeper and set primary watcher<a name="line.616"></a>
+<span class="sourceLineNo">617</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        if (!this.masterless) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          masterAddressTracker.start();<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          clusterStatusTracker.start();<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        } else {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          masterAddressTracker = null;<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          clusterStatusTracker = null;<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        }<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        zooKeeper = null;<a name="line.633"></a>
+<span class="sourceLineNo">634</span>        masterAddressTracker = null;<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        clusterStatusTracker = null;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      this.rpcServices.start(zooKeeper);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      // class HRS. TODO.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      this.choreService = new ChoreService(getName(), true);<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      this.executorService = new ExecutorService(getName());<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      putUpWebUI();<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    } catch (Throwable t) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // cause of failed startup is lost.<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      LOG.error("Failed construction RegionServer", t);<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      throw t;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
+<span class="sourceLineNo">653</span><a name="line.653"></a>
+<span class="sourceLineNo">654</span>  // HMaster should override this method to load the specific config for master<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.659"></a>
+<span class="sourceLineNo">660</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.660"></a>
+<span class="sourceLineNo">661</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.661"></a>
+<span class="sourceLineNo">662</span>        throw new IOException(msg);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>      } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>        return rpcServices.isa.getHostName();<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      return hostname;<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
+<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
+<span class="sourceLineNo">670</span><a name="line.670"></a>
+<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>   * If running on Windows, do windows-specific setup.<a name="line.672"></a>
+<span class="sourceLineNo">673</span>   */<a name="line.673"></a>
+<span class="sourceLineNo">674</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.674"></a>


<TRUNCATED>
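
The HRegionServer constructor shown in the diff above drives most of its startup behavior from plain Configuration keys ("hbase.masterless", "hbase.testing.nocluster", "hbase.regionserver.msginterval", plus the HConstants retry and timeout keys). The following is a minimal sketch, assuming an HBase test classpath, of how such a configuration could be assembled before handing it to the constructor; the helper class name MasterlessConfigSketch is hypothetical, and only the key names and defaults are taken from the quoted source.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    // Hypothetical helper; only the configuration key names and defaults below are taken
    // from the HRegionServer constructor shown in the diff above.
    public class MasterlessConfigSketch {
      public static Configuration masterlessTestConf() {
        Configuration conf = HBaseConfiguration.create();
        // Come up without a Master to talk to (the 'masterless' field, see HBASE-18846).
        conf.setBoolean("hbase.masterless", true);
        // Keep ZooKeeper-based trackers enabled; the constructor skips them only when
        // "hbase.testing.nocluster" is true.
        conf.setBoolean("hbase.testing.nocluster", false);
        // Heartbeat interval read as "hbase.regionserver.msginterval" (default 3 * 1000 ms).
        conf.setInt("hbase.regionserver.msginterval", 3 * 1000);
        return conf;
      }
    }

    // Usage (assumption): new HRegionServer(MasterlessConfigSketch.masterlessTestConf());
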

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
index 6f82cee..3bf3150 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
@@ -40,162 +40,156 @@
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.ClusterConnection;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.util.Tool;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.util.ToolRunner;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * In a scenario of Replication based Disaster/Recovery, when hbase Master-Cluster crashes, this<a name="line.42"></a>
-<span class="sourceLineNo">043</span> * tool is used to sync-up the delta from Master to Slave using the info from ZooKeeper. The tool<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * will run on Master-Cluser, and assume ZK, Filesystem and NetWork still available after hbase<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * crashes<a name="line.45"></a>
-<span class="sourceLineNo">046</span> *<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * &lt;pre&gt;<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * hbase org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * &lt;/pre&gt;<a name="line.49"></a>
-<span class="sourceLineNo">050</span> */<a name="line.50"></a>
-<span class="sourceLineNo">051</span>@InterfaceAudience.Private<a name="line.51"></a>
-<span class="sourceLineNo">052</span>public class ReplicationSyncUp extends Configured implements Tool {<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  private static final long SLEEP_TIME = 10000;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  /**<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * Main program<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   */<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public static void main(String[] args) throws Exception {<a name="line.59"></a>
-<span class="sourceLineNo">060</span>    int ret = ToolRunner.run(HBaseConfiguration.create(), new ReplicationSyncUp(), args);<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    System.exit(ret);<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  @Override<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  public int run(String[] args) throws Exception {<a name="line.65"></a>
-<span class="sourceLineNo">066</span>    Abortable abortable = new Abortable() {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>      @Override<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      public void abort(String why, Throwable e) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      }<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>      @Override<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      public boolean isAborted() {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        return false;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      }<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    };<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    Configuration conf = getConf();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    try (ZKWatcher zkw =<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      new ZKWatcher(conf, "syncupReplication" + System.currentTimeMillis(), abortable, true)) {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      Path walRootDir = FSUtils.getWALRootDir(conf);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      FileSystem fs = FSUtils.getWALFileSystem(conf);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      Path oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      Path logDir = new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME);<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>      System.out.println("Start Replication Server start");<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      Replication replication = new Replication();<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      replication.initialize(new DummyServer(zkw), fs, logDir, oldLogDir, null);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      ReplicationSourceManager manager = replication.getReplicationManager();<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      manager.init().get();<a name="line.88"></a>
-<span class="sourceLineNo">089</span>      while (manager.activeFailoverTaskCount() &gt; 0) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>        Thread.sleep(SLEEP_TIME);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      }<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      while (manager.getOldSources().size() &gt; 0) {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        Thread.sleep(SLEEP_TIME);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>      }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      manager.join();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    } catch (InterruptedException e) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      System.err.println("didn't wait long enough:" + e);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>      return -1;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    }<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    return 0;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  }<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  class DummyServer implements Server {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    String hostname;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    ZKWatcher zkw;<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>    DummyServer(ZKWatcher zkw) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      // an unique name in case the first run fails<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      hostname = System.currentTimeMillis() + ".SyncUpTool.replication.org";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      this.zkw = zkw;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>    DummyServer(String hostname) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      this.hostname = hostname;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>    @Override<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    public Configuration getConfiguration() {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      return getConf();<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    }<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>    @Override<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    public ZKWatcher getZooKeeper() {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      return zkw;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>    @Override<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    public CoordinatedStateManager getCoordinatedStateManager() {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      return null;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>    @Override<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public MetaTableLocator getMetaTableLocator() {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      return null;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>    @Override<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    public ServerName getServerName() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return ServerName.valueOf(hostname, 1234, 1L);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    @Override<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    public void abort(String why, Throwable e) {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    }<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>    @Override<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    public boolean isAborted() {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      return false;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>    @Override<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    public void stop(String why) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>    @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    public boolean isStopped() {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      return false;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>    @Override<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    public ClusterConnection getConnection() {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      return null;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    }<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    @Override<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    public ChoreService getChoreService() {<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      return null;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    @Override<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    public ClusterConnection getClusterConnection() {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      return null;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public FileSystem getFileSystem() {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      return null;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>    @Override<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    public boolean isStopping() {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>      return false;<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>    @Override<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    public Connection createConnection(Configuration conf) throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      return null;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    }<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  }<a name="line.189"></a>
-<span class="sourceLineNo">190</span>}<a name="line.190"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.util.Tool;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.util.ToolRunner;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * In a scenario of Replication based Disaster/Recovery, when hbase Master-Cluster crashes, this<a name="line.41"></a>
+<span class="sourceLineNo">042</span> * tool is used to sync-up the delta from Master to Slave using the info from ZooKeeper. The tool<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * will run on Master-Cluser, and assume ZK, Filesystem and NetWork still available after hbase<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * crashes<a name="line.44"></a>
+<span class="sourceLineNo">045</span> *<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * &lt;pre&gt;<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * hbase org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * &lt;/pre&gt;<a name="line.48"></a>
+<span class="sourceLineNo">049</span> */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>@InterfaceAudience.Private<a name="line.50"></a>
+<span class="sourceLineNo">051</span>public class ReplicationSyncUp extends Configured implements Tool {<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>  private static final long SLEEP_TIME = 10000;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  /**<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   * Main program<a name="line.56"></a>
+<span class="sourceLineNo">057</span>   */<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static void main(String[] args) throws Exception {<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    int ret = ToolRunner.run(HBaseConfiguration.create(), new ReplicationSyncUp(), args);<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    System.exit(ret);<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  }<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  @Override<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public int run(String[] args) throws Exception {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    Abortable abortable = new Abortable() {<a name="line.65"></a>
+<span class="sourceLineNo">066</span>      @Override<a name="line.66"></a>
+<span class="sourceLineNo">067</span>      public void abort(String why, Throwable e) {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      }<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>      @Override<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      public boolean isAborted() {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        return false;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      }<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    };<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    Configuration conf = getConf();<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    try (ZKWatcher zkw =<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      new ZKWatcher(conf, "syncupReplication" + System.currentTimeMillis(), abortable, true)) {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      Path walRootDir = FSUtils.getWALRootDir(conf);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      FileSystem fs = FSUtils.getWALFileSystem(conf);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      Path oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      Path logDir = new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME);<a name="line.81"></a>
+<span class="sourceLineNo">082</span><a name="line.82"></a>
+<span class="sourceLineNo">083</span>      System.out.println("Start Replication Server start");<a name="line.83"></a>
+<span class="sourceLineNo">084</span>      Replication replication = new Replication();<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      replication.initialize(new DummyServer(zkw), fs, logDir, oldLogDir, null);<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      ReplicationSourceManager manager = replication.getReplicationManager();<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      manager.init().get();<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      while (manager.activeFailoverTaskCount() &gt; 0) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        Thread.sleep(SLEEP_TIME);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      }<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      while (manager.getOldSources().size() &gt; 0) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        Thread.sleep(SLEEP_TIME);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>      }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      manager.join();<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    } catch (InterruptedException e) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>      System.err.println("didn't wait long enough:" + e);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      return -1;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    return 0;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  class DummyServer implements Server {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    String hostname;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    ZKWatcher zkw;<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>    DummyServer(ZKWatcher zkw) {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      // an unique name in case the first run fails<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      hostname = System.currentTimeMillis() + ".SyncUpTool.replication.org";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      this.zkw = zkw;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>    DummyServer(String hostname) {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      this.hostname = hostname;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    }<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>    @Override<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    public Configuration getConfiguration() {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      return getConf();<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>    @Override<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    public ZKWatcher getZooKeeper() {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      return zkw;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>    @Override<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    public CoordinatedStateManager getCoordinatedStateManager() {<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      return null;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
+<span class="sourceLineNo">130</span><a name="line.130"></a>
+<span class="sourceLineNo">131</span>    @Override<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    public ServerName getServerName() {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      return ServerName.valueOf(hostname, 1234, 1L);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    }<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>    @Override<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    public void abort(String why, Throwable e) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    }<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>    @Override<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    public boolean isAborted() {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      return false;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>    @Override<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    public void stop(String why) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    }<a name="line.147"></a>
+<span class="sourceLineNo">148</span><a name="line.148"></a>
+<span class="sourceLineNo">149</span>    @Override<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    public boolean isStopped() {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      return false;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>    @Override<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    public ClusterConnection getConnection() {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      return null;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
+<span class="sourceLineNo">158</span><a name="line.158"></a>
+<span class="sourceLineNo">159</span>    @Override<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    public ChoreService getChoreService() {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      return null;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    }<a name="line.162"></a>
+<span class="sourceLineNo">163</span><a name="line.163"></a>
+<span class="sourceLineNo">164</span>    @Override<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    public ClusterConnection getClusterConnection() {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      return null;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>    @Override<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    public FileSystem getFileSystem() {<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      return null;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    }<a name="line.172"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>    @Override<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    public boolean isStopping() {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      return false;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    }<a name="line.177"></a>
+<span class="sourceLineNo">178</span><a name="line.178"></a>
+<span class="sourceLineNo">179</span>    @Override<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    public Connection createConnection(Configuration conf) throws IOException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      return null;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>}<a name="line.184"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
index 6f82cee..3bf3150 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
@@ -40,162 +40,156 @@
 <span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.ClusterConnection;<a name="line.32"></a>
 <span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.33"></a>
 <span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.util.Tool;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.util.ToolRunner;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * In a scenario of Replication based Disaster/Recovery, when hbase Master-Cluster crashes, this<a name="line.42"></a>
-<span class="sourceLineNo">043</span> * tool is used to sync-up the delta from Master to Slave using the info from ZooKeeper. The tool<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * will run on Master-Cluser, and assume ZK, Filesystem and NetWork still available after hbase<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * crashes<a name="line.45"></a>
-<span class="sourceLineNo">046</span> *<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * &lt;pre&gt;<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * hbase org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp<a name="line.48"></a>
-<span class="sourceLineNo">049</span> * &lt;/pre&gt;<a name="line.49"></a>
-<span class="sourceLineNo">050</span> */<a name="line.50"></a>
-<span class="sourceLineNo">051</span>@InterfaceAudience.Private<a name="line.51"></a>
-<span class="sourceLineNo">052</span>public class ReplicationSyncUp extends Configured implements Tool {<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  private static final long SLEEP_TIME = 10000;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  /**<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * Main program<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   */<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public static void main(String[] args) throws Exception {<a name="line.59"></a>
-<span class="sourceLineNo">060</span>    int ret = ToolRunner.run(HBaseConfiguration.create(), new ReplicationSyncUp(), args);<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    System.exit(ret);<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  @Override<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  public int run(String[] args) throws Exception {<a name="line.65"></a>
-<span class="sourceLineNo">066</span>    Abortable abortable = new Abortable() {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>      @Override<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      public void abort(String why, Throwable e) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      }<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>      @Override<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      public boolean isAborted() {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        return false;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      }<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    };<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    Configuration conf = getConf();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    try (ZKWatcher zkw =<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      new ZKWatcher(conf, "syncupReplication" + System.currentTimeMillis(), abortable, true)) {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      Path walRootDir = FSUtils.getWALRootDir(conf);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      FileSystem fs = FSUtils.getWALFileSystem(conf);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      Path oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      Path logDir = new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME);<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>      System.out.println("Start Replication Server start");<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      Replication replication = new Replication();<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      replication.initialize(new DummyServer(zkw), fs, logDir, oldLogDir, null);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      ReplicationSourceManager manager = replication.getReplicationManager();<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      manager.init().get();<a name="line.88"></a>
-<span class="sourceLineNo">089</span>      while (manager.activeFailoverTaskCount() &gt; 0) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>        Thread.sleep(SLEEP_TIME);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      }<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      while (manager.getOldSources().size() &gt; 0) {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        Thread.sleep(SLEEP_TIME);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>      }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      manager.join();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    } catch (InterruptedException e) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      System.err.println("didn't wait long enough:" + e);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>      return -1;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    }<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    return 0;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  }<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  class DummyServer implements Server {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    String hostname;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    ZKWatcher zkw;<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>    DummyServer(ZKWatcher zkw) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      // an unique name in case the first run fails<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      hostname = System.currentTimeMillis() + ".SyncUpTool.replication.org";<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      this.zkw = zkw;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>    DummyServer(String hostname) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      this.hostname = hostname;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>    @Override<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    public Configuration getConfiguration() {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      return getConf();<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    }<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>    @Override<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    public ZKWatcher getZooKeeper() {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      return zkw;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>    @Override<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    public CoordinatedStateManager getCoordinatedStateManager() {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      return null;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>    @Override<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public MetaTableLocator getMetaTableLocator() {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      return null;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>    @Override<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    public ServerName getServerName() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      return ServerName.valueOf(hostname, 1234, 1L);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    @Override<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    public void abort(String why, Throwable e) {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    }<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>    @Override<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    public boolean isAborted() {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      return false;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>    @Override<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    public void stop(String why) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>    @Override<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    public boolean isStopped() {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      return false;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>    @Override<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    public ClusterConnection getConnection() {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      return null;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    }<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    @Override<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    public ChoreService getChoreService() {<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      return null;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    @Override<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    public ClusterConnection getClusterConnection() {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      return null;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    @Override<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    public FileSystem getFileSystem() {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      return null;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>    @Override<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    public boolean isStopping() {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>      return false;<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>    @Override<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    public Connection createConnection(Configuration conf) throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      return null;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    }<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  }<a name="line.189"></a>
-<span class="sourceLineNo">190</span>}<a name="line.190"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.util.Tool;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.util.ToolRunner;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * In a scenario of Replication based Disaster/Recovery, when hbase Master-Cluster crashes, this<a name="line.41"></a>
+<span class="sourceLineNo">042</span> * tool is used to sync-up the delta from Master to Slave using the info from ZooKeeper. The tool<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * will run on Master-Cluser, and assume ZK, Filesystem and NetWork still available after hbase<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * crashes<a name="line.44"></a>
+<span class="sourceLineNo">045</span> *<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * &lt;pre&gt;<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * hbase org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * &lt;/pre&gt;<a name="line.48"></a>
+<span class="sourceLineNo">049</span> */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>@InterfaceAudience.Private<a name="line.50"></a>
+<span class="sourceLineNo">051</span>public class ReplicationSyncUp extends Configured implements Tool {<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>  private static final long SLEEP_TIME = 10000;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  /**<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   * Main program<a name="line.56"></a>
+<span class="sourceLineNo">057</span>   */<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static void main(String[] args) throws Exception {<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    int ret = ToolRunner.run(HBaseConfiguration.create(), new ReplicationSyncUp(), args);<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    System.exit(ret);<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  }<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  @Override<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public int run(String[] args) throws Exception {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    Abortable abortable = new Abortable() {<a name="line.65"></a>
+<span class="sourceLineNo">066</span>      @Override<a name="line.66"></a>
+<span class="sourceLineNo">067</span>      public void abort(String why, Throwable e) {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      }<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>      @Override<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      public boolean isAborted() {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        return false;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      }<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    };<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    Configuration conf = getConf();<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    try (ZKWatcher zkw =<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      new ZKWatcher(conf, "syncupReplication" + System.currentTimeMillis(), abortable, true)) {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      Path walRootDir = FSUtils.getWALRootDir(conf);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      FileSystem fs = FSUtils.getWALFileSystem(conf);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      Path oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      Path logDir = new Path(walRootDir, HConstants.HREGION_LOGDIR_NAME);<a name="line.81"></a>
+<span class="sourceLineNo">082</span><a name="line.82"></a>
+<span class="sourceLineNo">083</span>      System.out.println("Start Replication Server start");<a name="line.83"></a>
+<span class="sourceLineNo">084</span>      Replication replication = new Replication();<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      replication.initialize(new DummyServer(zkw), fs, logDir, oldLogDir, null);<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      ReplicationSourceManager manager = replication.getReplicationManager();<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      manager.init().get();<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      while (manager.activeFailoverTaskCount() &gt; 0) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        Thread.sleep(SLEEP_TIME);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      }<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      while (manager.getOldSources().size() &gt; 0) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        Thread.sleep(SLEEP_TIME);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>      }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      manager.join();<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    } catch (InterruptedException e) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>      System.err.println("didn't wait long enough:" + e);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      return -1;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    return 0;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  class DummyServer implements Server {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    String hostname;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    ZKWatcher zkw;<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>    DummyServer(ZKWatcher zkw) {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      // an unique name in case the first run fails<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      hostname = System.currentTimeMillis() + ".SyncUpTool.replication.org";<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      this.zkw = zkw;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>    DummyServer(String hostname) {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      this.hostname = hostname;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    }<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>    @Override<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    public Configuration getConfiguration() {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      return getConf();<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>    @Override<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    public ZKWatcher getZooKeeper() {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      return zkw;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>    @Override<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    public CoordinatedStateManager getCoordinatedStateManager() {<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      return null;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
+<span class="sourceLineNo">130</span><a name="line.130"></a>
+<span class="sourceLineNo">131</span>    @Override<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    public ServerName getServerName() {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      return ServerName.valueOf(hostname, 1234, 1L);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    }<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>    @Override<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    public void abort(String why, Throwable e) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    }<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>    @Override<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    public boolean isAborted() {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      return false;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>    @Override<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    public void stop(String why) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    }<a name="line.147"></a>
+<span class="sourceLineNo">148</span><a name="line.148"></a>
+<span class="sourceLineNo">149</span>    @Override<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    public boolean isStopped() {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      return false;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
+<span class="sourceLineNo">153</span><a name="line.153"></a>
+<span class="sourceLineNo">154</span>    @Override<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    public ClusterConnection getConnection() {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      return null;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
+<span class="sourceLineNo">158</span><a name="line.158"></a>
+<span class="sourceLineNo">159</span>    @Override<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    public ChoreService getChoreService() {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      return null;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    }<a name="line.162"></a>
+<span class="sourceLineNo">163</span><a name="line.163"></a>
+<span class="sourceLineNo">164</span>    @Override<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    public ClusterConnection getClusterConnection() {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      return null;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>    @Override<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    public FileSystem getFileSystem() {<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      return null;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    }<a name="line.172"></a>
+<span class="sourceLineNo">173</span><a name="line.173"></a>
+<span class="sourceLineNo">174</span>    @Override<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    public boolean isStopping() {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      return false;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    }<a name="line.177"></a>
+<span class="sourceLineNo">178</span><a name="line.178"></a>
+<span class="sourceLineNo">179</span>    @Override<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    public Connection createConnection(Configuration conf) throws IOException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      return null;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>}<a name="line.184"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
index 809f66f..9b60dd0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.FailedOpenUpdaterThread.html
@@ -765,146 +765,145 @@
 <span class="sourceLineNo">757</span>        found.set(true);<a name="line.757"></a>
 <span class="sourceLineNo">758</span>        try {<a name="line.758"></a>
 <span class="sourceLineNo">759</span>          boolean rootMetaFound =<a name="line.759"></a>
-<span class="sourceLineNo">760</span>              masterServices.getMetaTableLocator().verifyMetaRegionLocation(<a name="line.760"></a>
-<span class="sourceLineNo">761</span>                  conn, masterServices.getZooKeeper(), 1);<a name="line.761"></a>
-<span class="sourceLineNo">762</span>          if (rootMetaFound) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>              @Override<a name="line.764"></a>
-<span class="sourceLineNo">765</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.766"></a>
-<span class="sourceLineNo">767</span>                if (info != null) {<a name="line.767"></a>
-<span class="sourceLineNo">768</span>                  Cell serverCell =<a name="line.768"></a>
-<span class="sourceLineNo">769</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.769"></a>
-<span class="sourceLineNo">770</span>                          HConstants.SERVER_QUALIFIER);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.771"></a>
-<span class="sourceLineNo">772</span>                    ServerName sn =<a name="line.772"></a>
-<span class="sourceLineNo">773</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>                    if (sn == null) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>                      found.set(false);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.776"></a>
-<span class="sourceLineNo">777</span>                      try {<a name="line.777"></a>
-<span class="sourceLineNo">778</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>                        ClientProtos.GetRequest request =<a name="line.779"></a>
-<span class="sourceLineNo">780</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.780"></a>
-<span class="sourceLineNo">781</span>                                new Get(ROW_KEY));<a name="line.781"></a>
-<span class="sourceLineNo">782</span>                        rs.get(null, request);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>                        assignedRegions.add(info);<a name="line.783"></a>
-<span class="sourceLineNo">784</span>                      } catch(Exception ex) {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>                      }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>                    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>                    foundRegions.add(info);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>                  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span>                }<a name="line.790"></a>
-<span class="sourceLineNo">791</span>                return true;<a name="line.791"></a>
-<span class="sourceLineNo">792</span>              }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>            };<a name="line.793"></a>
-<span class="sourceLineNo">794</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.794"></a>
-<span class="sourceLineNo">795</span>            // if no regions in meta then we have to create the table<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>              createRSGroupTable();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>              createSent = true;<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.800"></a>
-<span class="sourceLineNo">801</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.801"></a>
-<span class="sourceLineNo">802</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.803"></a>
-<span class="sourceLineNo">804</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>          } else {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.806"></a>
-<span class="sourceLineNo">807</span>            found.set(false);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>          }<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          if (found.get()) {<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            online = true;<a name="line.812"></a>
-<span class="sourceLineNo">813</span>            //flush any inconsistencies between ZK and HTable<a name="line.813"></a>
-<span class="sourceLineNo">814</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>          }<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        } catch (RuntimeException e) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          throw e;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        } catch(Exception e) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          found.set(false);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>          LOG.warn("Failed to perform check", e);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        try {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>          Thread.sleep(100);<a name="line.823"></a>
-<span class="sourceLineNo">824</span>        } catch (InterruptedException e) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          LOG.info("Sleep interrupted", e);<a name="line.825"></a>
-<span class="sourceLineNo">826</span>        }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      }<a name="line.827"></a>
-<span class="sourceLineNo">828</span>      return found.get();<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    }<a name="line.829"></a>
-<span class="sourceLineNo">830</span><a name="line.830"></a>
-<span class="sourceLineNo">831</span>    private void createRSGroupTable() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      // wait for region to be online<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int tries = 600;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.835"></a>
-<span class="sourceLineNo">836</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          &amp;&amp; tries &gt; 0) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>        try {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>          Thread.sleep(100);<a name="line.839"></a>
-<span class="sourceLineNo">840</span>        } catch (InterruptedException e) {<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          throw new IOException("Wait interrupted ", e);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        }<a name="line.842"></a>
-<span class="sourceLineNo">843</span>        tries--;<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      }<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      if(tries &lt;= 0) {<a name="line.845"></a>
-<span class="sourceLineNo">846</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      } else {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.849"></a>
-<span class="sourceLineNo">850</span>          throw new IOException("Failed to create group table. " +<a name="line.850"></a>
-<span class="sourceLineNo">851</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.851"></a>
-<span class="sourceLineNo">852</span>        }<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      }<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    }<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    public boolean isOnline() {<a name="line.856"></a>
-<span class="sourceLineNo">857</span>      return online;<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    }<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  }<a name="line.863"></a>
-<span class="sourceLineNo">864</span><a name="line.864"></a>
-<span class="sourceLineNo">865</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>    for (Mutation mutation : mutations) {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      if (mutation instanceof Put) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            mutation));<a name="line.873"></a>
-<span class="sourceLineNo">874</span>      } else if (mutation instanceof Delete) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        mmrBuilder.addMutationRequest(<a name="line.875"></a>
-<span class="sourceLineNo">876</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.876"></a>
-<span class="sourceLineNo">877</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.877"></a>
-<span class="sourceLineNo">878</span>                  MutationType.DELETE, mutation));<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      } else {<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.880"></a>
-<span class="sourceLineNo">881</span>          + mutation.getClass().getName());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span><a name="line.884"></a>
-<span class="sourceLineNo">885</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>    try {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.888"></a>
-<span class="sourceLineNo">889</span>    } catch (ServiceException ex) {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      ProtobufUtil.toIOException(ex);<a name="line.890"></a>
-<span class="sourceLineNo">891</span>    }<a name="line.891"></a>
-<span class="sourceLineNo">892</span>  }<a name="line.892"></a>
-<span class="sourceLineNo">893</span><a name="line.893"></a>
-<span class="sourceLineNo">894</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.896"></a>
-<span class="sourceLineNo">897</span>    }<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  }<a name="line.898"></a>
-<span class="sourceLineNo">899</span>}<a name="line.899"></a>
+<span class="sourceLineNo">760</span>            Utility.verifyMetaRegionLocation(conn, masterServices.getZooKeeper(), 1);<a name="line.760"></a>
+<span class="sourceLineNo">761</span>          if (rootMetaFound) {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.762"></a>
+<span class="sourceLineNo">763</span>              @Override<a name="line.763"></a>
+<span class="sourceLineNo">764</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.765"></a>
+<span class="sourceLineNo">766</span>                if (info != null) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>                  Cell serverCell =<a name="line.767"></a>
+<span class="sourceLineNo">768</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.768"></a>
+<span class="sourceLineNo">769</span>                          HConstants.SERVER_QUALIFIER);<a name="line.769"></a>
+<span class="sourceLineNo">770</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.770"></a>
+<span class="sourceLineNo">771</span>                    ServerName sn =<a name="line.771"></a>
+<span class="sourceLineNo">772</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.772"></a>
+<span class="sourceLineNo">773</span>                    if (sn == null) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>                      found.set(false);<a name="line.774"></a>
+<span class="sourceLineNo">775</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.775"></a>
+<span class="sourceLineNo">776</span>                      try {<a name="line.776"></a>
+<span class="sourceLineNo">777</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.777"></a>
+<span class="sourceLineNo">778</span>                        ClientProtos.GetRequest request =<a name="line.778"></a>
+<span class="sourceLineNo">779</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.779"></a>
+<span class="sourceLineNo">780</span>                                new Get(ROW_KEY));<a name="line.780"></a>
+<span class="sourceLineNo">781</span>                        rs.get(null, request);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>                        assignedRegions.add(info);<a name="line.782"></a>
+<span class="sourceLineNo">783</span>                      } catch(Exception ex) {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.784"></a>
+<span class="sourceLineNo">785</span>                      }<a name="line.785"></a>
+<span class="sourceLineNo">786</span>                    }<a name="line.786"></a>
+<span class="sourceLineNo">787</span>                    foundRegions.add(info);<a name="line.787"></a>
+<span class="sourceLineNo">788</span>                  }<a name="line.788"></a>
+<span class="sourceLineNo">789</span>                }<a name="line.789"></a>
+<span class="sourceLineNo">790</span>                return true;<a name="line.790"></a>
+<span class="sourceLineNo">791</span>              }<a name="line.791"></a>
+<span class="sourceLineNo">792</span>            };<a name="line.792"></a>
+<span class="sourceLineNo">793</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.793"></a>
+<span class="sourceLineNo">794</span>            // if no regions in meta then we have to create the table<a name="line.794"></a>
+<span class="sourceLineNo">795</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>              createRSGroupTable();<a name="line.796"></a>
+<span class="sourceLineNo">797</span>              createSent = true;<a name="line.797"></a>
+<span class="sourceLineNo">798</span>            }<a name="line.798"></a>
+<span class="sourceLineNo">799</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.799"></a>
+<span class="sourceLineNo">800</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.800"></a>
+<span class="sourceLineNo">801</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.801"></a>
+<span class="sourceLineNo">802</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.802"></a>
+<span class="sourceLineNo">803</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.803"></a>
+<span class="sourceLineNo">804</span>          } else {<a name="line.804"></a>
+<span class="sourceLineNo">805</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.805"></a>
+<span class="sourceLineNo">806</span>            found.set(false);<a name="line.806"></a>
+<span class="sourceLineNo">807</span>          }<a name="line.807"></a>
+<span class="sourceLineNo">808</span>          if (found.get()) {<a name="line.808"></a>
+<span class="sourceLineNo">809</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.809"></a>
+<span class="sourceLineNo">810</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.810"></a>
+<span class="sourceLineNo">811</span>            online = true;<a name="line.811"></a>
+<span class="sourceLineNo">812</span>            //flush any inconsistencies between ZK and HTable<a name="line.812"></a>
+<span class="sourceLineNo">813</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.813"></a>
+<span class="sourceLineNo">814</span>          }<a name="line.814"></a>
+<span class="sourceLineNo">815</span>        } catch (RuntimeException e) {<a name="line.815"></a>
+<span class="sourceLineNo">816</span>          throw e;<a name="line.816"></a>
+<span class="sourceLineNo">817</span>        } catch(Exception e) {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>          found.set(false);<a name="line.818"></a>
+<span class="sourceLineNo">819</span>          LOG.warn("Failed to perform check", e);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>        }<a name="line.820"></a>
+<span class="sourceLineNo">821</span>        try {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>          Thread.sleep(100);<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        } catch (InterruptedException e) {<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          LOG.info("Sleep interrupted", e);<a name="line.824"></a>
+<span class="sourceLineNo">825</span>        }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>      return found.get();<a name="line.827"></a>
+<span class="sourceLineNo">828</span>    }<a name="line.828"></a>
+<span class="sourceLineNo">829</span><a name="line.829"></a>
+<span class="sourceLineNo">830</span>    private void createRSGroupTable() throws IOException {<a name="line.830"></a>
+<span class="sourceLineNo">831</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.831"></a>
+<span class="sourceLineNo">832</span>      // wait for region to be online<a name="line.832"></a>
+<span class="sourceLineNo">833</span>      int tries = 600;<a name="line.833"></a>
+<span class="sourceLineNo">834</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.834"></a>
+<span class="sourceLineNo">835</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.835"></a>
+<span class="sourceLineNo">836</span>          &amp;&amp; tries &gt; 0) {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>        try {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>          Thread.sleep(100);<a name="line.838"></a>
+<span class="sourceLineNo">839</span>        } catch (InterruptedException e) {<a name="line.839"></a>
+<span class="sourceLineNo">840</span>          throw new IOException("Wait interrupted ", e);<a name="line.840"></a>
+<span class="sourceLineNo">841</span>        }<a name="line.841"></a>
+<span class="sourceLineNo">842</span>        tries--;<a name="line.842"></a>
+<span class="sourceLineNo">843</span>      }<a name="line.843"></a>
+<span class="sourceLineNo">844</span>      if(tries &lt;= 0) {<a name="line.844"></a>
+<span class="sourceLineNo">845</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.845"></a>
+<span class="sourceLineNo">846</span>      } else {<a name="line.846"></a>
+<span class="sourceLineNo">847</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>          throw new IOException("Failed to create group table. " +<a name="line.849"></a>
+<span class="sourceLineNo">850</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.850"></a>
+<span class="sourceLineNo">851</span>        }<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      }<a name="line.852"></a>
+<span class="sourceLineNo">853</span>    }<a name="line.853"></a>
+<span class="sourceLineNo">854</span><a name="line.854"></a>
+<span class="sourceLineNo">855</span>    public boolean isOnline() {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      return online;<a name="line.856"></a>
+<span class="sourceLineNo">857</span>    }<a name="line.857"></a>
+<span class="sourceLineNo">858</span>  }<a name="line.858"></a>
+<span class="sourceLineNo">859</span><a name="line.859"></a>
+<span class="sourceLineNo">860</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.860"></a>
+<span class="sourceLineNo">861</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.861"></a>
+<span class="sourceLineNo">862</span>  }<a name="line.862"></a>
+<span class="sourceLineNo">863</span><a name="line.863"></a>
+<span class="sourceLineNo">864</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.864"></a>
+<span class="sourceLineNo">865</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.865"></a>
+<span class="sourceLineNo">866</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.866"></a>
+<span class="sourceLineNo">867</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.867"></a>
+<span class="sourceLineNo">868</span>    for (Mutation mutation : mutations) {<a name="line.868"></a>
+<span class="sourceLineNo">869</span>      if (mutation instanceof Put) {<a name="line.869"></a>
+<span class="sourceLineNo">870</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.870"></a>
+<span class="sourceLineNo">871</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.871"></a>
+<span class="sourceLineNo">872</span>            mutation));<a name="line.872"></a>
+<span class="sourceLineNo">873</span>      } else if (mutation instanceof Delete) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>        mmrBuilder.addMutationRequest(<a name="line.874"></a>
+<span class="sourceLineNo">875</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.875"></a>
+<span class="sourceLineNo">876</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.876"></a>
+<span class="sourceLineNo">877</span>                  MutationType.DELETE, mutation));<a name="line.877"></a>
+<span class="sourceLineNo">878</span>      } else {<a name="line.878"></a>
+<span class="sourceLineNo">879</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.879"></a>
+<span class="sourceLineNo">880</span>          + mutation.getClass().getName());<a name="line.880"></a>
+<span class="sourceLineNo">881</span>      }<a name="line.881"></a>
+<span class="sourceLineNo">882</span>    }<a name="line.882"></a>
+<span class="sourceLineNo">883</span><a name="line.883"></a>
+<span class="sourceLineNo">884</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.884"></a>
+<span class="sourceLineNo">885</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.885"></a>
+<span class="sourceLineNo">886</span>    try {<a name="line.886"></a>
+<span class="sourceLineNo">887</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.887"></a>
+<span class="sourceLineNo">888</span>    } catch (ServiceException ex) {<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      ProtobufUtil.toIOException(ex);<a name="line.889"></a>
+<span class="sourceLineNo">890</span>    }<a name="line.890"></a>
+<span class="sourceLineNo">891</span>  }<a name="line.891"></a>
+<span class="sourceLineNo">892</span><a name="line.892"></a>
+<span class="sourceLineNo">893</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.894"></a>
+<span class="sourceLineNo">895</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.895"></a>
+<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
+<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
+<span class="sourceLineNo">898</span>}<a name="line.898"></a>
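Editor's note on the hunk above: the multiMutate() helper in this RSGroupInfoManagerImpl diff funnels every write to the rsgroup system table through the multi-row-mutation coprocessor endpoint, so a group's rows are updated atomically within a single region. The following is a minimal, hedged sketch of that call pattern against the public HBase client API; the wrapper class, method name, and the plain IOException wrapping are illustrative choices, not the project's actual code, and the non-shaded generated protobuf classes are assumed to match the ones referenced in the hunk.

// Sketch of the multi-row-mutation call pattern used by multiMutate() above.
// Class name and error handling are illustrative, not HBase's own code.
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;

import com.google.protobuf.ServiceException;

public final class AtomicMultiRowWrite {
  /** Applies puts/deletes to the region hosting rowKey as one atomic batch. */
  static void mutateAtomically(Table table, byte[] rowKey, List<Mutation> mutations)
      throws IOException {
    MultiRowMutationProtos.MutateRowsRequest.Builder request =
        MultiRowMutationProtos.MutateRowsRequest.newBuilder();
    for (Mutation m : mutations) {
      // Only Put and Delete are supported, mirroring the check in the hunk above.
      MutationType type = (m instanceof Put) ? MutationType.PUT : MutationType.DELETE;
      request.addMutationRequest(ProtobufUtil.toMutation(type, m));
    }
    // The endpoint runs on the region that contains rowKey; every mutated row
    // must belong to that region for the batch to be atomic.
    CoprocessorRpcChannel channel = table.coprocessorService(rowKey);
    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface stub =
        MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);
    try {
      stub.mutateRows(null, request.build());
    } catch (ServiceException e) {
      throw new IOException("multi-row mutation failed", e);
    }
  }
}

Pushing the whole batch through one coprocessor call is what lets the group row and its membership rows move together; a plain Table.batch() would not provide that cross-row atomicity.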
 
 
 


[39/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
index 3aa1909..f57507c 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2824">HBaseFsck.WorkItemOverlapMerge</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2818">HBaseFsck.WorkItemOverlapMerge</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 </li>
@@ -211,7 +211,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>handler</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2825">handler</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2819">handler</a></pre>
 </li>
 </ul>
 <a name="overlapgroup">
@@ -220,7 +220,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>overlapgroup</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2826">overlapgroup</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2820">overlapgroup</a></pre>
 </li>
 </ul>
 </li>
@@ -237,7 +237,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemOverlapMerge</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2828">WorkItemOverlapMerge</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlapgroup,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2822">WorkItemOverlapMerge</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlapgroup,
                      <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler)</pre>
 </li>
 </ul>
@@ -255,7 +255,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2834">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2828">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
index ffea861..5a504a4 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4339">HBaseFsck.WorkItemRegion</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4333">HBaseFsck.WorkItemRegion</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 <div class="block">Contact a region server and get all information from it</div>
@@ -226,7 +226,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>hbck</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4340">hbck</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4334">hbck</a></pre>
 </li>
 </ul>
 <a name="rsinfo">
@@ -235,7 +235,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>rsinfo</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4341">rsinfo</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4335">rsinfo</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -244,7 +244,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4342">errors</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4336">errors</a></pre>
 </li>
 </ul>
 <a name="connection">
@@ -253,7 +253,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>connection</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4343">connection</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4337">connection</a></pre>
 </li>
 </ul>
 </li>
@@ -270,7 +270,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemRegion</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4345">WorkItemRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4339">WorkItemRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;info,
                <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
                <a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection)</pre>
@@ -290,7 +290,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4354">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4348">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -306,7 +306,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>filterRegions</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4388">filterRegions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;regions)</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4382">filterRegions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;regions)</pre>
 </li>
 </ul>
 </li>
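Editor's note on the javadoc fragments above: WorkItemRegion is one instance of hbck's fan-out pattern, a Callable&lt;Void&gt; per region server, submitted to a shared executor, that contacts its server and records the regions found there. Below is a rough sketch of that pattern over the public Admin API; the class, the printed output, and the system-table filter are illustrative stand-ins for hbck's internal bookkeeping and its filterRegions() logic, not the actual HBaseFsck implementation.

// Illustrative work-item pattern, not the real HBaseFsck.WorkItemRegion code.
import java.util.List;
import java.util.concurrent.Callable;

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionInfo;

class RegionServerWorkItem implements Callable<Void> {
  private final Connection connection;
  private final ServerName server;

  RegionServerWorkItem(Connection connection, ServerName server) {
    this.connection = connection;
    this.server = server;
  }

  @Override
  public Void call() throws Exception {
    try (Admin admin = connection.getAdmin()) {
      // Ask the server which regions it currently hosts.
      List<RegionInfo> regions = admin.getRegions(server);
      for (RegionInfo region : regions) {
        // Stand-in for filterRegions(): skip system tables here; the real hbck
        // filter honours its table include list and meta-only mode instead.
        if (region.getTable().isSystemTable()) {
          continue;
        }
        // hbck would record this deployment for later consistency checking.
        System.out.println(server + " hosts " + region.getRegionNameAsString());
      }
    }
    return null;
  }
}

// Typical use: one work item per region server, e.g.
//   Future<Void> f = executor.submit(new RegionServerWorkItem(connection, serverName));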


[04/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.ge

<TRUNCATED>

[17/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFi

<TRUNCATED>

[20/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
index 809f66f..9b60dd0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
@@ -765,146 +765,145 @@
 <span class="sourceLineNo">757</span>        found.set(true);<a name="line.757"></a>
 <span class="sourceLineNo">758</span>        try {<a name="line.758"></a>
 <span class="sourceLineNo">759</span>          boolean rootMetaFound =<a name="line.759"></a>
-<span class="sourceLineNo">760</span>              masterServices.getMetaTableLocator().verifyMetaRegionLocation(<a name="line.760"></a>
-<span class="sourceLineNo">761</span>                  conn, masterServices.getZooKeeper(), 1);<a name="line.761"></a>
-<span class="sourceLineNo">762</span>          if (rootMetaFound) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.763"></a>
-<span class="sourceLineNo">764</span>              @Override<a name="line.764"></a>
-<span class="sourceLineNo">765</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.765"></a>
-<span class="sourceLineNo">766</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.766"></a>
-<span class="sourceLineNo">767</span>                if (info != null) {<a name="line.767"></a>
-<span class="sourceLineNo">768</span>                  Cell serverCell =<a name="line.768"></a>
-<span class="sourceLineNo">769</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.769"></a>
-<span class="sourceLineNo">770</span>                          HConstants.SERVER_QUALIFIER);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.771"></a>
-<span class="sourceLineNo">772</span>                    ServerName sn =<a name="line.772"></a>
-<span class="sourceLineNo">773</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.773"></a>
-<span class="sourceLineNo">774</span>                    if (sn == null) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>                      found.set(false);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.776"></a>
-<span class="sourceLineNo">777</span>                      try {<a name="line.777"></a>
-<span class="sourceLineNo">778</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>                        ClientProtos.GetRequest request =<a name="line.779"></a>
-<span class="sourceLineNo">780</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.780"></a>
-<span class="sourceLineNo">781</span>                                new Get(ROW_KEY));<a name="line.781"></a>
-<span class="sourceLineNo">782</span>                        rs.get(null, request);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>                        assignedRegions.add(info);<a name="line.783"></a>
-<span class="sourceLineNo">784</span>                      } catch(Exception ex) {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>                      }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>                    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>                    foundRegions.add(info);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>                  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span>                }<a name="line.790"></a>
-<span class="sourceLineNo">791</span>                return true;<a name="line.791"></a>
-<span class="sourceLineNo">792</span>              }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>            };<a name="line.793"></a>
-<span class="sourceLineNo">794</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.794"></a>
-<span class="sourceLineNo">795</span>            // if no regions in meta then we have to create the table<a name="line.795"></a>
-<span class="sourceLineNo">796</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>              createRSGroupTable();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>              createSent = true;<a name="line.798"></a>
-<span class="sourceLineNo">799</span>            }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.800"></a>
-<span class="sourceLineNo">801</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.801"></a>
-<span class="sourceLineNo">802</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.803"></a>
-<span class="sourceLineNo">804</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>          } else {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.806"></a>
-<span class="sourceLineNo">807</span>            found.set(false);<a name="line.807"></a>
-<span class="sourceLineNo">808</span>          }<a name="line.808"></a>
-<span class="sourceLineNo">809</span>          if (found.get()) {<a name="line.809"></a>
-<span class="sourceLineNo">810</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.810"></a>
-<span class="sourceLineNo">811</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.811"></a>
-<span class="sourceLineNo">812</span>            online = true;<a name="line.812"></a>
-<span class="sourceLineNo">813</span>            //flush any inconsistencies between ZK and HTable<a name="line.813"></a>
-<span class="sourceLineNo">814</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.814"></a>
-<span class="sourceLineNo">815</span>          }<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        } catch (RuntimeException e) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          throw e;<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        } catch(Exception e) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          found.set(false);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>          LOG.warn("Failed to perform check", e);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>        }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>        try {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>          Thread.sleep(100);<a name="line.823"></a>
-<span class="sourceLineNo">824</span>        } catch (InterruptedException e) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>          LOG.info("Sleep interrupted", e);<a name="line.825"></a>
-<span class="sourceLineNo">826</span>        }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      }<a name="line.827"></a>
-<span class="sourceLineNo">828</span>      return found.get();<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    }<a name="line.829"></a>
-<span class="sourceLineNo">830</span><a name="line.830"></a>
-<span class="sourceLineNo">831</span>    private void createRSGroupTable() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      // wait for region to be online<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int tries = 600;<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.835"></a>
-<span class="sourceLineNo">836</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          &amp;&amp; tries &gt; 0) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>        try {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>          Thread.sleep(100);<a name="line.839"></a>
-<span class="sourceLineNo">840</span>        } catch (InterruptedException e) {<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          throw new IOException("Wait interrupted ", e);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        }<a name="line.842"></a>
-<span class="sourceLineNo">843</span>        tries--;<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      }<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      if(tries &lt;= 0) {<a name="line.845"></a>
-<span class="sourceLineNo">846</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      } else {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.849"></a>
-<span class="sourceLineNo">850</span>          throw new IOException("Failed to create group table. " +<a name="line.850"></a>
-<span class="sourceLineNo">851</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.851"></a>
-<span class="sourceLineNo">852</span>        }<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      }<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    }<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    public boolean isOnline() {<a name="line.856"></a>
-<span class="sourceLineNo">857</span>      return online;<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    }<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  }<a name="line.863"></a>
-<span class="sourceLineNo">864</span><a name="line.864"></a>
-<span class="sourceLineNo">865</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>    for (Mutation mutation : mutations) {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>      if (mutation instanceof Put) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.871"></a>
-<span class="sourceLineNo">872</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.872"></a>
-<span class="sourceLineNo">873</span>            mutation));<a name="line.873"></a>
-<span class="sourceLineNo">874</span>      } else if (mutation instanceof Delete) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        mmrBuilder.addMutationRequest(<a name="line.875"></a>
-<span class="sourceLineNo">876</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.876"></a>
-<span class="sourceLineNo">877</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.877"></a>
-<span class="sourceLineNo">878</span>                  MutationType.DELETE, mutation));<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      } else {<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.880"></a>
-<span class="sourceLineNo">881</span>          + mutation.getClass().getName());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span><a name="line.884"></a>
-<span class="sourceLineNo">885</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.886"></a>
-<span class="sourceLineNo">887</span>    try {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.888"></a>
-<span class="sourceLineNo">889</span>    } catch (ServiceException ex) {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>      ProtobufUtil.toIOException(ex);<a name="line.890"></a>
-<span class="sourceLineNo">891</span>    }<a name="line.891"></a>
-<span class="sourceLineNo">892</span>  }<a name="line.892"></a>
-<span class="sourceLineNo">893</span><a name="line.893"></a>
-<span class="sourceLineNo">894</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.896"></a>
-<span class="sourceLineNo">897</span>    }<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  }<a name="line.898"></a>
-<span class="sourceLineNo">899</span>}<a name="line.899"></a>
+<span class="sourceLineNo">760</span>            Utility.verifyMetaRegionLocation(conn, masterServices.getZooKeeper(), 1);<a name="line.760"></a>
+<span class="sourceLineNo">761</span>          if (rootMetaFound) {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>            MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() {<a name="line.762"></a>
+<span class="sourceLineNo">763</span>              @Override<a name="line.763"></a>
+<span class="sourceLineNo">764</span>              public boolean visitInternal(Result row) throws IOException {<a name="line.764"></a>
+<span class="sourceLineNo">765</span>                RegionInfo info = MetaTableAccessor.getRegionInfo(row);<a name="line.765"></a>
+<span class="sourceLineNo">766</span>                if (info != null) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>                  Cell serverCell =<a name="line.767"></a>
+<span class="sourceLineNo">768</span>                      row.getColumnLatestCell(HConstants.CATALOG_FAMILY,<a name="line.768"></a>
+<span class="sourceLineNo">769</span>                          HConstants.SERVER_QUALIFIER);<a name="line.769"></a>
+<span class="sourceLineNo">770</span>                  if (RSGROUP_TABLE_NAME.equals(info.getTable()) &amp;&amp; serverCell != null) {<a name="line.770"></a>
+<span class="sourceLineNo">771</span>                    ServerName sn =<a name="line.771"></a>
+<span class="sourceLineNo">772</span>                        ServerName.parseVersionedServerName(CellUtil.cloneValue(serverCell));<a name="line.772"></a>
+<span class="sourceLineNo">773</span>                    if (sn == null) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>                      found.set(false);<a name="line.774"></a>
+<span class="sourceLineNo">775</span>                    } else if (tsm.isTableState(RSGROUP_TABLE_NAME, TableState.State.ENABLED)) {<a name="line.775"></a>
+<span class="sourceLineNo">776</span>                      try {<a name="line.776"></a>
+<span class="sourceLineNo">777</span>                        ClientProtos.ClientService.BlockingInterface rs = conn.getClient(sn);<a name="line.777"></a>
+<span class="sourceLineNo">778</span>                        ClientProtos.GetRequest request =<a name="line.778"></a>
+<span class="sourceLineNo">779</span>                            RequestConverter.buildGetRequest(info.getRegionName(),<a name="line.779"></a>
+<span class="sourceLineNo">780</span>                                new Get(ROW_KEY));<a name="line.780"></a>
+<span class="sourceLineNo">781</span>                        rs.get(null, request);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>                        assignedRegions.add(info);<a name="line.782"></a>
+<span class="sourceLineNo">783</span>                      } catch(Exception ex) {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>                        LOG.debug("Caught exception while verifying group region", ex);<a name="line.784"></a>
+<span class="sourceLineNo">785</span>                      }<a name="line.785"></a>
+<span class="sourceLineNo">786</span>                    }<a name="line.786"></a>
+<span class="sourceLineNo">787</span>                    foundRegions.add(info);<a name="line.787"></a>
+<span class="sourceLineNo">788</span>                  }<a name="line.788"></a>
+<span class="sourceLineNo">789</span>                }<a name="line.789"></a>
+<span class="sourceLineNo">790</span>                return true;<a name="line.790"></a>
+<span class="sourceLineNo">791</span>              }<a name="line.791"></a>
+<span class="sourceLineNo">792</span>            };<a name="line.792"></a>
+<span class="sourceLineNo">793</span>            MetaTableAccessor.fullScanRegions(conn, visitor);<a name="line.793"></a>
+<span class="sourceLineNo">794</span>            // if no regions in meta then we have to create the table<a name="line.794"></a>
+<span class="sourceLineNo">795</span>            if (foundRegions.size() &lt; 1 &amp;&amp; rootMetaFound &amp;&amp; !createSent) {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>              createRSGroupTable();<a name="line.796"></a>
+<span class="sourceLineNo">797</span>              createSent = true;<a name="line.797"></a>
+<span class="sourceLineNo">798</span>            }<a name="line.798"></a>
+<span class="sourceLineNo">799</span>            LOG.info("RSGroup table=" + RSGROUP_TABLE_NAME + " isOnline=" + found.get()<a name="line.799"></a>
+<span class="sourceLineNo">800</span>                + ", regionCount=" + foundRegions.size() + ", assignCount="<a name="line.800"></a>
+<span class="sourceLineNo">801</span>                + assignedRegions.size() + ", rootMetaFound=" + rootMetaFound);<a name="line.801"></a>
+<span class="sourceLineNo">802</span>            found.set(found.get() &amp;&amp; assignedRegions.size() == foundRegions.size()<a name="line.802"></a>
+<span class="sourceLineNo">803</span>                &amp;&amp; foundRegions.size() &gt; 0);<a name="line.803"></a>
+<span class="sourceLineNo">804</span>          } else {<a name="line.804"></a>
+<span class="sourceLineNo">805</span>            LOG.info("Waiting for catalog tables to come online");<a name="line.805"></a>
+<span class="sourceLineNo">806</span>            found.set(false);<a name="line.806"></a>
+<span class="sourceLineNo">807</span>          }<a name="line.807"></a>
+<span class="sourceLineNo">808</span>          if (found.get()) {<a name="line.808"></a>
+<span class="sourceLineNo">809</span>            LOG.debug("With group table online, refreshing cached information.");<a name="line.809"></a>
+<span class="sourceLineNo">810</span>            RSGroupInfoManagerImpl.this.refresh(true);<a name="line.810"></a>
+<span class="sourceLineNo">811</span>            online = true;<a name="line.811"></a>
+<span class="sourceLineNo">812</span>            //flush any inconsistencies between ZK and HTable<a name="line.812"></a>
+<span class="sourceLineNo">813</span>            RSGroupInfoManagerImpl.this.flushConfig();<a name="line.813"></a>
+<span class="sourceLineNo">814</span>          }<a name="line.814"></a>
+<span class="sourceLineNo">815</span>        } catch (RuntimeException e) {<a name="line.815"></a>
+<span class="sourceLineNo">816</span>          throw e;<a name="line.816"></a>
+<span class="sourceLineNo">817</span>        } catch(Exception e) {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>          found.set(false);<a name="line.818"></a>
+<span class="sourceLineNo">819</span>          LOG.warn("Failed to perform check", e);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>        }<a name="line.820"></a>
+<span class="sourceLineNo">821</span>        try {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>          Thread.sleep(100);<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        } catch (InterruptedException e) {<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          LOG.info("Sleep interrupted", e);<a name="line.824"></a>
+<span class="sourceLineNo">825</span>        }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>      return found.get();<a name="line.827"></a>
+<span class="sourceLineNo">828</span>    }<a name="line.828"></a>
+<span class="sourceLineNo">829</span><a name="line.829"></a>
+<span class="sourceLineNo">830</span>    private void createRSGroupTable() throws IOException {<a name="line.830"></a>
+<span class="sourceLineNo">831</span>      Long procId = masterServices.createSystemTable(RSGROUP_TABLE_DESC);<a name="line.831"></a>
+<span class="sourceLineNo">832</span>      // wait for region to be online<a name="line.832"></a>
+<span class="sourceLineNo">833</span>      int tries = 600;<a name="line.833"></a>
+<span class="sourceLineNo">834</span>      while (!(masterServices.getMasterProcedureExecutor().isFinished(procId))<a name="line.834"></a>
+<span class="sourceLineNo">835</span>          &amp;&amp; masterServices.getMasterProcedureExecutor().isRunning()<a name="line.835"></a>
+<span class="sourceLineNo">836</span>          &amp;&amp; tries &gt; 0) {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>        try {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>          Thread.sleep(100);<a name="line.838"></a>
+<span class="sourceLineNo">839</span>        } catch (InterruptedException e) {<a name="line.839"></a>
+<span class="sourceLineNo">840</span>          throw new IOException("Wait interrupted ", e);<a name="line.840"></a>
+<span class="sourceLineNo">841</span>        }<a name="line.841"></a>
+<span class="sourceLineNo">842</span>        tries--;<a name="line.842"></a>
+<span class="sourceLineNo">843</span>      }<a name="line.843"></a>
+<span class="sourceLineNo">844</span>      if(tries &lt;= 0) {<a name="line.844"></a>
+<span class="sourceLineNo">845</span>        throw new IOException("Failed to create group table in a given time.");<a name="line.845"></a>
+<span class="sourceLineNo">846</span>      } else {<a name="line.846"></a>
+<span class="sourceLineNo">847</span>        Procedure&lt;?&gt; result = masterServices.getMasterProcedureExecutor().getResult(procId);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        if (result != null &amp;&amp; result.isFailed()) {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>          throw new IOException("Failed to create group table. " +<a name="line.849"></a>
+<span class="sourceLineNo">850</span>              MasterProcedureUtil.unwrapRemoteIOException(result));<a name="line.850"></a>
+<span class="sourceLineNo">851</span>        }<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      }<a name="line.852"></a>
+<span class="sourceLineNo">853</span>    }<a name="line.853"></a>
+<span class="sourceLineNo">854</span><a name="line.854"></a>
+<span class="sourceLineNo">855</span>    public boolean isOnline() {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      return online;<a name="line.856"></a>
+<span class="sourceLineNo">857</span>    }<a name="line.857"></a>
+<span class="sourceLineNo">858</span>  }<a name="line.858"></a>
+<span class="sourceLineNo">859</span><a name="line.859"></a>
+<span class="sourceLineNo">860</span>  private static boolean isMasterRunning(MasterServices masterServices) {<a name="line.860"></a>
+<span class="sourceLineNo">861</span>    return !masterServices.isAborted() &amp;&amp; !masterServices.isStopped();<a name="line.861"></a>
+<span class="sourceLineNo">862</span>  }<a name="line.862"></a>
+<span class="sourceLineNo">863</span><a name="line.863"></a>
+<span class="sourceLineNo">864</span>  private void multiMutate(List&lt;Mutation&gt; mutations) throws IOException {<a name="line.864"></a>
+<span class="sourceLineNo">865</span>    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);<a name="line.865"></a>
+<span class="sourceLineNo">866</span>    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder<a name="line.866"></a>
+<span class="sourceLineNo">867</span>      = MultiRowMutationProtos.MutateRowsRequest.newBuilder();<a name="line.867"></a>
+<span class="sourceLineNo">868</span>    for (Mutation mutation : mutations) {<a name="line.868"></a>
+<span class="sourceLineNo">869</span>      if (mutation instanceof Put) {<a name="line.869"></a>
+<span class="sourceLineNo">870</span>        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.870"></a>
+<span class="sourceLineNo">871</span>            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,<a name="line.871"></a>
+<span class="sourceLineNo">872</span>            mutation));<a name="line.872"></a>
+<span class="sourceLineNo">873</span>      } else if (mutation instanceof Delete) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>        mmrBuilder.addMutationRequest(<a name="line.874"></a>
+<span class="sourceLineNo">875</span>            org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(<a name="line.875"></a>
+<span class="sourceLineNo">876</span>                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.<a name="line.876"></a>
+<span class="sourceLineNo">877</span>                  MutationType.DELETE, mutation));<a name="line.877"></a>
+<span class="sourceLineNo">878</span>      } else {<a name="line.878"></a>
+<span class="sourceLineNo">879</span>        throw new DoNotRetryIOException("multiMutate doesn't support "<a name="line.879"></a>
+<span class="sourceLineNo">880</span>          + mutation.getClass().getName());<a name="line.880"></a>
+<span class="sourceLineNo">881</span>      }<a name="line.881"></a>
+<span class="sourceLineNo">882</span>    }<a name="line.882"></a>
+<span class="sourceLineNo">883</span><a name="line.883"></a>
+<span class="sourceLineNo">884</span>    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =<a name="line.884"></a>
+<span class="sourceLineNo">885</span>      MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);<a name="line.885"></a>
+<span class="sourceLineNo">886</span>    try {<a name="line.886"></a>
+<span class="sourceLineNo">887</span>      service.mutateRows(null, mmrBuilder.build());<a name="line.887"></a>
+<span class="sourceLineNo">888</span>    } catch (ServiceException ex) {<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      ProtobufUtil.toIOException(ex);<a name="line.889"></a>
+<span class="sourceLineNo">890</span>    }<a name="line.890"></a>
+<span class="sourceLineNo">891</span>  }<a name="line.891"></a>
+<span class="sourceLineNo">892</span><a name="line.892"></a>
+<span class="sourceLineNo">893</span>  private void checkGroupName(String groupName) throws ConstraintException {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>    if (!groupName.matches("[a-zA-Z0-9_]+")) {<a name="line.894"></a>
+<span class="sourceLineNo">895</span>      throw new ConstraintException("RSGroup name should only contain alphanumeric characters");<a name="line.895"></a>
+<span class="sourceLineNo">896</span>    }<a name="line.896"></a>
+<span class="sourceLineNo">897</span>  }<a name="line.897"></a>
+<span class="sourceLineNo">898</span>}<a name="line.898"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/Utility.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/Utility.html b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/Utility.html
index 714d140..bbc4d94 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/Utility.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/rsgroup/Utility.html
@@ -27,38 +27,230 @@
 <span class="sourceLineNo">019</span> */<a name="line.19"></a>
 <span class="sourceLineNo">020</span>package org.apache.hadoop.hbase.rsgroup;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import java.util.HashSet;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import java.util.Set;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.ServerName;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.net.Address;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>/**<a name="line.30"></a>
-<span class="sourceLineNo">031</span> * Utility for this RSGroup package in hbase-rsgroup.<a name="line.31"></a>
-<span class="sourceLineNo">032</span> */<a name="line.32"></a>
-<span class="sourceLineNo">033</span>@InterfaceAudience.Private<a name="line.33"></a>
-<span class="sourceLineNo">034</span>final class Utility {<a name="line.34"></a>
-<span class="sourceLineNo">035</span>  private Utility() {<a name="line.35"></a>
-<span class="sourceLineNo">036</span>  }<a name="line.36"></a>
-<span class="sourceLineNo">037</span><a name="line.37"></a>
-<span class="sourceLineNo">038</span>  /**<a name="line.38"></a>
-<span class="sourceLineNo">039</span>   * @param master the master to get online servers for<a name="line.39"></a>
-<span class="sourceLineNo">040</span>   * @return Set of online Servers named for their hostname and port (not ServerName).<a name="line.40"></a>
-<span class="sourceLineNo">041</span>   */<a name="line.41"></a>
-<span class="sourceLineNo">042</span>  static Set&lt;Address&gt; getOnlineServers(final MasterServices master) {<a name="line.42"></a>
-<span class="sourceLineNo">043</span>    Set&lt;Address&gt; onlineServers = new HashSet&lt;Address&gt;();<a name="line.43"></a>
-<span class="sourceLineNo">044</span>    if (master == null) {<a name="line.44"></a>
-<span class="sourceLineNo">045</span>      return onlineServers;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    }<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>    for(ServerName server: master.getServerManager().getOnlineServers().keySet()) {<a name="line.48"></a>
-<span class="sourceLineNo">049</span>      onlineServers.add(server.getAddress());<a name="line.49"></a>
-<span class="sourceLineNo">050</span>    }<a name="line.50"></a>
-<span class="sourceLineNo">051</span>    return onlineServers;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  }<a name="line.52"></a>
-<span class="sourceLineNo">053</span>}<a name="line.53"></a>
+<span class="sourceLineNo">022</span>import java.io.EOFException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.net.ConnectException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.net.NoRouteToHostException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.net.SocketException;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.net.SocketTimeoutException;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.net.UnknownHostException;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.HashSet;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.Locale;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.Set;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.ServerName;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.ClusterConnection;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.client.RetriesExhaustedException;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.ipc.FailedServerException;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.net.Address;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.slf4j.Logger;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.slf4j.LoggerFactory;<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>/**<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * Utility for this RSGroup package in hbase-rsgroup.<a name="line.58"></a>
+<span class="sourceLineNo">059</span> */<a name="line.59"></a>
+<span class="sourceLineNo">060</span>@InterfaceAudience.Private<a name="line.60"></a>
+<span class="sourceLineNo">061</span>final class Utility {<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  private static final Logger LOG = LoggerFactory.getLogger(Utility.class);<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private Utility() {<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  }<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>  /**<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * @param master the master to get online servers for<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   * @return Set of online Servers named for their hostname and port (not ServerName).<a name="line.70"></a>
+<span class="sourceLineNo">071</span>   */<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  static Set&lt;Address&gt; getOnlineServers(final MasterServices master) {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    Set&lt;Address&gt; onlineServers = new HashSet&lt;Address&gt;();<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    if (master == null) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      return onlineServers;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    }<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>    for (ServerName server : master.getServerManager().getOnlineServers().keySet()) {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      onlineServers.add(server.getAddress());<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    }<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    return onlineServers;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * Verify &lt;code&gt;hbase:meta&lt;/code&gt; is deployed and accessible.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   * @param hConnection the connection to use<a name="line.86"></a>
+<span class="sourceLineNo">087</span>   * @param zkw reference to the {@link ZKWatcher} which also contains configuration and operation<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * @param timeout How long to wait on zk for meta address (passed through to the internal call to<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   *          {@link #getMetaServerConnection}.<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * @return True if the &lt;code&gt;hbase:meta&lt;/code&gt; location is healthy.<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * @throws IOException if the number of retries for getting the connection is exceeded<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * @throws InterruptedException if waiting for the socket operation fails<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   */<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  public static boolean verifyMetaRegionLocation(ClusterConnection hConnection, ZKWatcher zkw,<a name="line.94"></a>
+<span class="sourceLineNo">095</span>      final long timeout) throws InterruptedException, IOException {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    return verifyMetaRegionLocation(hConnection, zkw, timeout, RegionInfo.DEFAULT_REPLICA_ID);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  }<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  /**<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * Verify &lt;code&gt;hbase:meta&lt;/code&gt; is deployed and accessible.<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   * @param connection the connection to use<a name="line.101"></a>
+<span class="sourceLineNo">102</span>   * @param zkw reference to the {@link ZKWatcher} which also contains configuration and operation<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   * @param timeout How long to wait on zk for meta address (passed through to<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * @param replicaId the ID of the replica<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * @return True if the &lt;code&gt;hbase:meta&lt;/code&gt; location is healthy.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * @throws InterruptedException if waiting for the socket operation fails<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   * @throws IOException if the number of retries for getting the connection is exceeded<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   */<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  public static boolean verifyMetaRegionLocation(ClusterConnection connection, ZKWatcher zkw,<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      final long timeout, int replicaId) throws InterruptedException, IOException {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    AdminProtos.AdminService.BlockingInterface service = null;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    try {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      service = getMetaServerConnection(connection, zkw, timeout, replicaId);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    } catch (NotAllMetaRegionsOnlineException e) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      // Pass<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    } catch (ServerNotRunningYetException e) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      // Pass -- remote server is not up so can't be carrying root<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    } catch (UnknownHostException e) {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      // Pass -- server name doesn't resolve so it can't be assigned anything.<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    } catch (RegionServerStoppedException e) {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>      // Pass -- server name sends us to a server that is dying or already dead.<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    }<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    return (service != null) &amp;&amp; verifyRegionLocation(connection, service,<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      MetaTableLocator.getMetaRegionLocation(zkw, replicaId),<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      RegionReplicaUtil.getRegionInfoForReplica(RegionInfoBuilder.FIRST_META_REGIONINFO, replicaId)<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        .getRegionName());<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  /**<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   * Verify we can connect to &lt;code&gt;hostingServer&lt;/code&gt; and that its carrying<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * &lt;code&gt;regionName&lt;/code&gt;.<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * @param hostingServer Interface to the server hosting &lt;code&gt;regionName&lt;/code&gt;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   * @param address The servername that goes with the &lt;code&gt;metaServer&lt;/code&gt; interface. Used<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   *          logging.<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * @param regionName The regionname we are interested in.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   * @return True if we were able to verify the region located at other side of the interface.<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   */<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  // TODO: We should be able to get the ServerName from the AdminProtocol<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  // rather than have to pass it in. Its made awkward by the fact that the<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  // HRI is likely a proxy against remote server so the getServerName needs<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  // to be fixed to go to a local method or to a cache before we can do this.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  private static boolean verifyRegionLocation(final ClusterConnection connection,<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      AdminService.BlockingInterface hostingServer, final ServerName address,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>      final byte[] regionName) {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    if (hostingServer == null) {<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      LOG.info("Passed hostingServer is null");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      return false;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    Throwable t;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    HBaseRpcController controller = connection.getRpcControllerFactory().newController();<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    try {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      // Try and get regioninfo from the hosting server.<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      return ProtobufUtil.getRegionInfo(controller, hostingServer, regionName) != null;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    } catch (ConnectException e) {<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      t = e;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>    } catch (RetriesExhaustedException e) {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      t = e;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    } catch (RemoteException e) {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      IOException ioe = e.unwrapRemoteException();<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      t = ioe;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    } catch (IOException e) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      Throwable cause = e.getCause();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      if (cause != null &amp;&amp; cause instanceof EOFException) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        t = cause;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      } else if (cause != null &amp;&amp; cause.getMessage() != null &amp;&amp;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>        cause.getMessage().contains("Connection reset")) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        t = cause;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      } else {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        t = e;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    LOG.info("Failed verification of " + Bytes.toStringBinary(regionName) + " at address=" +<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      address + ", exception=" + t.getMessage());<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    return false;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>  /**<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   * specified timeout for availability.<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * WARNING: Does not retry. Use an {@link org.apache.hadoop.hbase.client.HTable} instead.<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   * @param connection the connection to use<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * @param zkw reference to the {@link ZKWatcher} which also contains configuration and operation<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   * @param timeout How long to wait on meta location<a name="line.184"></a>
+<span class="sourceLineNo">185</span>   * @param replicaId the ID of the replica<a name="line.185"></a>
+<span class="sourceLineNo">186</span>   * @return connection to server hosting meta<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * @throws InterruptedException if waiting for the socket operation fails<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * @throws IOException if the number of retries for getting the connection is exceeded<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   */<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  private static AdminService.BlockingInterface getMetaServerConnection(<a name="line.190"></a>
+<span class="sourceLineNo">191</span>      ClusterConnection connection, ZKWatcher zkw, long timeout, int replicaId)<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      throws InterruptedException, IOException {<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    return getCachedConnection(connection,<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      MetaTableLocator.waitMetaRegionLocation(zkw, replicaId, timeout));<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  /**<a name="line.197"></a>
+<span class="sourceLineNo">198</span>   * @param sn ServerName to get a connection against.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>   * @return The AdminProtocol we got when we connected to &lt;code&gt;sn&lt;/code&gt; May have come from cache,<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   *         may not be good, may have been setup by this invocation, or may be null.<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @throws IOException if the number of retries for getting the connection is exceeded<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   */<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  private static AdminService.BlockingInterface getCachedConnection(ClusterConnection connection,<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      ServerName sn) throws IOException {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    if (sn == null) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      return null;<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    AdminService.BlockingInterface service = null;<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    try {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      service = connection.getAdmin(sn);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    } catch (RetriesExhaustedException e) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      if (e.getCause() != null &amp;&amp; e.getCause() instanceof ConnectException) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>        LOG.debug("Catch this; presume it means the cached connection has gone bad.");<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      } else {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        throw e;<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      }<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    } catch (SocketTimeoutException e) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      LOG.debug("Timed out connecting to " + sn);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    } catch (NoRouteToHostException e) {<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      LOG.debug("Connecting to " + sn, e);<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    } catch (SocketException e) {<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      LOG.debug("Exception connecting to " + sn);<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    } catch (UnknownHostException e) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      LOG.debug("Unknown host exception connecting to  " + sn);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    } catch (FailedServerException e) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      if (LOG.isDebugEnabled()) {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        LOG.debug("Server " + sn + " is in failed server list.");<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    } catch (IOException ioe) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      Throwable cause = ioe.getCause();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      if (ioe instanceof ConnectException) {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        LOG.debug("Catch. Connect refused.");<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      } else if (cause != null &amp;&amp; cause instanceof EOFException) {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>        LOG.debug("Catch. Other end disconnected us.");<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      } else if (cause != null &amp;&amp; cause.getMessage() != null &amp;&amp;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>        cause.getMessage().toLowerCase(Locale.ROOT).contains("connection reset")) {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        LOG.debug("Catch. Connection reset.");<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      } else {<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        throw ioe;<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      }<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>    }<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    return service;<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  }<a name="line.244"></a>
+<span class="sourceLineNo">245</span>}<a name="line.245"></a>
 
 
 


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
index dcf1bc9..4c3e262 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.397">MasterRpcServices.BalanceSwitchMode</a>
+<pre>static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.398">MasterRpcServices.BalanceSwitchMode</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master">MasterRpcServices.BalanceSwitchMode</a>&gt;</pre>
 </li>
 </ul>
@@ -210,7 +210,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SYNC</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master">MasterRpcServices.BalanceSwitchMode</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html#line.398">SYNC</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master">MasterRpcServices.BalanceSwitchMode</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html#line.399">SYNC</a></pre>
 </li>
 </ul>
 <a name="ASYNC">
@@ -219,7 +219,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ASYNC</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master">MasterRpcServices.BalanceSwitchMode</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html#line.399">ASYNC</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master">MasterRpcServices.BalanceSwitchMode</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html#line.400">ASYNC</a></pre>
 </li>
 </ul>
 </li>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool to make modifications to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the lock file cannot be created, the returned stream is null.<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return A Pair of the lock file path and the FSDataOutputStream for the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
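A minimal caller sketch for checkAndMarkRunningHbck(), assuming the three-argument RetryCounterFactory constructor used above and the getFirst()/getSecond() accessors of org.apache.hadoop.hbase.util.Pair; a null stream means the lock could not be acquired (for example, it is already held), matching the javadoc above:

  RetryCounter retryCounter = new RetryCounterFactory(5, 200, 5000).create();
  Pair<Path, FSDataOutputStream> lock = HBaseFsck.checkAndMarkRunningHbck(conf, retryCounter);
  if (lock.getSecond() == null) {
    // another hbck (or an hbase-2.x Master holding the hbck1 lock) is already running
    throw new IOException("Duplicate hbck detected; lock file " + lock.getFirst() + " is held");
  }
  // ... do exclusive work; the unlockHbck() method that follows closes the stream and deletes the lock file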
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()),

<TRUNCATED>

[51/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/27555316
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/27555316
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/27555316

Branch: refs/heads/asf-site
Commit: 27555316811b8f031d18508100f39b4345de49ce
Parents: e467988
Author: jenkins <bu...@apache.org>
Authored: Wed Dec 5 14:52:35 2018 +0000
Committer: jenkins <bu...@apache.org>
Committed: Wed Dec 5 14:52:35 2018 +0000

----------------------------------------------------------------------
 acid-semantics.html                             |     4 +-
 apache_hbase_reference_guide.pdf                |     4 +-
 book.html                                       |     2 +-
 bulk-loads.html                                 |     4 +-
 checkstyle-aggregate.html                       | 16482 ++++++++---------
 checkstyle.rss                                  |    32 +-
 coc.html                                        |     4 +-
 dependencies.html                               |     4 +-
 dependency-convergence.html                     |     4 +-
 dependency-info.html                            |     4 +-
 dependency-management.html                      |     4 +-
 devapidocs/constant-values.html                 |     4 +-
 devapidocs/index-all.html                       |    73 +-
 devapidocs/org/apache/hadoop/hbase/Server.html  |    51 +-
 .../hadoop/hbase/backup/package-tree.html       |     4 +-
 .../NotAllMetaRegionsOnlineException.html       |     8 +-
 .../hadoop/hbase/class-use/ServerName.html      |    76 +-
 .../client/class-use/ClusterConnection.html     |   101 +-
 .../hbase/client/class-use/RegionInfo.html      |    12 +-
 .../hadoop/hbase/client/package-tree.html       |    26 +-
 .../apache/hadoop/hbase/client/package-use.html |     5 -
 .../hadoop/hbase/executor/package-tree.html     |     2 +-
 .../hadoop/hbase/filter/package-tree.html       |     8 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |     2 +-
 .../hadoop/hbase/mapreduce/package-tree.html    |     4 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |     6 +-
 .../master/HMasterCommandLine.LocalHMaster.html |     6 +-
 .../hbase/master/MasterMetaBootstrap.html       |     2 +-
 .../MasterRpcServices.BalanceSwitchMode.html    |     6 +-
 .../hadoop/hbase/master/MasterRpcServices.html  |   230 +-
 .../hadoop/hbase/master/MasterServices.html     |     2 +-
 .../hbase/master/MasterStatusServlet.html       |    12 +-
 .../hadoop/hbase/master/package-tree.html       |     4 +-
 .../hbase/master/procedure/package-tree.html    |     2 +-
 .../hadoop/hbase/monitoring/package-tree.html   |     2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    18 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     6 +-
 .../hadoop/hbase/quotas/package-tree.html       |     8 +-
 .../HRegionServer.CompactionChecker.html        |    14 +-
 .../HRegionServer.MovedRegionInfo.html          |    16 +-
 .../HRegionServer.MovedRegionsCleaner.html      |    16 +-
 .../HRegionServer.PeriodicMemStoreFlusher.html  |    14 +-
 ...RegionServer.SystemExitWhenAbortTimeout.html |     6 +-
 .../hbase/regionserver/HRegionServer.html       |   883 +-
 .../regionserver/RegionServerServices.html      |     2 +-
 .../hadoop/hbase/regionserver/package-tree.html |    20 +-
 .../regionserver/querymatcher/package-tree.html |     2 +-
 .../ReplicationSyncUp.DummyServer.html          |    76 +-
 .../regionserver/ReplicationSyncUp.html         |    10 +-
 .../replication/regionserver/package-tree.html  |     2 +-
 .../hadoop/hbase/rest/model/package-tree.html   |     2 +-
 ...oupInfoManagerImpl.RSGroupStartupWorker.html |     4 +-
 .../hbase/rsgroup/RSGroupInfoManagerImpl.html   |     6 +-
 .../apache/hadoop/hbase/rsgroup/Utility.html    |   222 +-
 .../hbase/security/access/package-tree.html     |     4 +-
 .../hadoop/hbase/security/package-tree.html     |     2 +-
 .../hadoop/hbase/thrift/package-tree.html       |     2 +-
 ...BaseFsck.CheckRegionConsistencyWorkItem.html |    10 +-
 .../HBaseFsck.ErrorReporter.ERROR_CODE.html     |    80 +-
 .../hbase/util/HBaseFsck.ErrorReporter.html     |    30 +-
 .../hbase/util/HBaseFsck.FileLockCallable.html  |    16 +-
 .../hbase/util/HBaseFsck.HBaseFsckTool.html     |     6 +-
 .../hadoop/hbase/util/HBaseFsck.HbckInfo.html   |    56 +-
 .../hadoop/hbase/util/HBaseFsck.HdfsEntry.html  |    14 +-
 .../hadoop/hbase/util/HBaseFsck.MetaEntry.html  |    18 +-
 .../hbase/util/HBaseFsck.OnlineEntry.html       |    10 +-
 .../util/HBaseFsck.PrintingErrorReporter.html   |    42 +-
 .../HBaseFsck.RegionBoundariesInformation.html  |    16 +-
 .../util/HBaseFsck.RegionRepairException.html   |     8 +-
 .../HBaseFsck.TableInfo.HDFSIntegrityFixer.html |    22 +-
 ...aseFsck.TableInfo.IntegrityFixSuggester.html |    20 +-
 .../hadoop/hbase/util/HBaseFsck.TableInfo.html  |    38 +-
 .../hbase/util/HBaseFsck.WorkItemHdfsDir.html   |    12 +-
 .../util/HBaseFsck.WorkItemHdfsRegionInfo.html  |    12 +-
 .../util/HBaseFsck.WorkItemOverlapMerge.html    |    10 +-
 .../hbase/util/HBaseFsck.WorkItemRegion.html    |    16 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.html |   654 +-
 .../hadoop/hbase/util/class-use/Pair.html       |     6 +-
 .../apache/hadoop/hbase/util/package-tree.html  |     8 +-
 .../hbase/zookeeper/MetaTableLocator.html       |   426 +-
 .../zookeeper/ZKUtil.JaasConfiguration.html     |    30 +-
 .../hbase/zookeeper/ZKUtil.NodeAndData.html     |    16 +-
 .../ZKUtil.ZKUtilOp.CreateAndFailSilent.html    |    12 +-
 .../ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html   |     8 +-
 .../zookeeper/ZKUtil.ZKUtilOp.SetData.html      |    18 +-
 .../hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html |    16 +-
 .../apache/hadoop/hbase/zookeeper/ZKUtil.html   |   146 +-
 .../zookeeper/class-use/MetaTableLocator.html   |   102 +-
 .../hbase/zookeeper/class-use/ZKWatcher.html    |   159 +-
 .../hadoop/hbase/zookeeper/package-summary.html |     4 +-
 .../hadoop/hbase/zookeeper/package-use.html     |    20 +-
 .../org/apache/hadoop/hbase/Server.html         |   211 +-
 .../org/apache/hadoop/hbase/Version.html        |     4 +-
 .../hbase/master/MasterMetaBootstrap.html       |    97 +-
 .../MasterRpcServices.BalanceSwitchMode.html    |  4799 ++---
 .../hadoop/hbase/master/MasterRpcServices.html  |  4799 ++---
 .../hbase/master/MasterStatusServlet.html       |   144 +-
 .../procedure/ProcedureSyncWait.Predicate.html  |    26 +-
 .../ProcedureSyncWait.ProcedureFuture.html      |    26 +-
 .../master/procedure/ProcedureSyncWait.html     |    26 +-
 .../master/snapshot/MasterSnapshotVerifier.html |    44 +-
 .../master/snapshot/TakeSnapshotHandler.html    |    74 +-
 .../flush/MasterFlushTableProcedureManager.html |    56 +-
 .../HRegionServer.CompactionChecker.html        |  6990 ++++---
 .../HRegionServer.MovedRegionInfo.html          |  6990 ++++---
 .../HRegionServer.MovedRegionsCleaner.html      |  6990 ++++---
 .../HRegionServer.PeriodicMemStoreFlusher.html  |  6990 ++++---
 ...RegionServer.SystemExitWhenAbortTimeout.html |  6990 ++++---
 .../hbase/regionserver/HRegionServer.html       |  6990 ++++---
 .../ReplicationSyncUp.DummyServer.html          |   306 +-
 .../regionserver/ReplicationSyncUp.html         |   306 +-
 ...InfoManagerImpl.FailedOpenUpdaterThread.html |   279 +-
 ...oupInfoManagerImpl.RSGroupStartupWorker.html |   279 +-
 ...oManagerImpl.ServerEventsListenerThread.html |   279 +-
 .../hbase/rsgroup/RSGroupInfoManagerImpl.html   |   279 +-
 .../apache/hadoop/hbase/rsgroup/Utility.html    |   256 +-
 ...BaseFsck.CheckRegionConsistencyWorkItem.html | 10258 +++++-----
 .../HBaseFsck.ErrorReporter.ERROR_CODE.html     | 10258 +++++-----
 .../hbase/util/HBaseFsck.ErrorReporter.html     | 10258 +++++-----
 .../hbase/util/HBaseFsck.FileLockCallable.html  | 10258 +++++-----
 .../hbase/util/HBaseFsck.HBaseFsckTool.html     | 10258 +++++-----
 .../hadoop/hbase/util/HBaseFsck.HbckInfo.html   | 10258 +++++-----
 .../hadoop/hbase/util/HBaseFsck.HdfsEntry.html  | 10258 +++++-----
 .../hadoop/hbase/util/HBaseFsck.MetaEntry.html  | 10258 +++++-----
 .../hbase/util/HBaseFsck.OnlineEntry.html       | 10258 +++++-----
 .../util/HBaseFsck.PrintingErrorReporter.html   | 10258 +++++-----
 .../HBaseFsck.RegionBoundariesInformation.html  | 10258 +++++-----
 .../util/HBaseFsck.RegionRepairException.html   | 10258 +++++-----
 .../HBaseFsck.TableInfo.HDFSIntegrityFixer.html | 10258 +++++-----
 ...aseFsck.TableInfo.IntegrityFixSuggester.html | 10258 +++++-----
 .../hadoop/hbase/util/HBaseFsck.TableInfo.html  | 10258 +++++-----
 .../hbase/util/HBaseFsck.WorkItemHdfsDir.html   | 10258 +++++-----
 .../util/HBaseFsck.WorkItemHdfsRegionInfo.html  | 10258 +++++-----
 .../util/HBaseFsck.WorkItemOverlapMerge.html    | 10258 +++++-----
 .../hbase/util/HBaseFsck.WorkItemRegion.html    | 10258 +++++-----
 .../org/apache/hadoop/hbase/util/HBaseFsck.html | 10258 +++++-----
 .../hbase/zookeeper/MetaTableLocator.html       |  1017 +-
 .../zookeeper/ZKUtil.JaasConfiguration.html     |  4203 +++--
 .../hbase/zookeeper/ZKUtil.NodeAndData.html     |  4203 +++--
 .../ZKUtil.ZKUtilOp.CreateAndFailSilent.html    |  4203 +++--
 .../ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html   |  4203 +++--
 .../zookeeper/ZKUtil.ZKUtilOp.SetData.html      |  4203 +++--
 .../hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html |  4203 +++--
 .../apache/hadoop/hbase/zookeeper/ZKUtil.html   |  4203 +++--
 downloads.html                                  |     4 +-
 export_control.html                             |     4 +-
 index.html                                      |     4 +-
 integration.html                                |     4 +-
 issue-tracking.html                             |     4 +-
 license.html                                    |     4 +-
 mail-lists.html                                 |     4 +-
 metrics.html                                    |     4 +-
 old_news.html                                   |     4 +-
 plugin-management.html                          |     4 +-
 plugins.html                                    |     4 +-
 poweredbyhbase.html                             |     4 +-
 project-info.html                               |     4 +-
 project-reports.html                            |     4 +-
 project-summary.html                            |     4 +-
 pseudo-distributed.html                         |     4 +-
 replication.html                                |     4 +-
 resources.html                                  |     4 +-
 source-repository.html                          |     4 +-
 sponsors.html                                   |     4 +-
 supportingprojects.html                         |     4 +-
 team-list.html                                  |     4 +-
 testdevapidocs/allclasses-frame.html            |     1 +
 testdevapidocs/allclasses-noframe.html          |     1 +
 testdevapidocs/index-all.html                   |    71 +-
 ...aseCluster.MiniHBaseClusterRegionServer.html |     4 +-
 .../hadoop/hbase/MockRegionServerServices.html  |   217 +-
 ...entOperationTimeout.DelayedRegionServer.html |     4 +-
 .../hbase/TestLocalHBaseCluster.MyHMaster.html  |     6 +-
 .../TestLocalHBaseCluster.MyHRegionServer.html  |     4 +-
 ...estMetaTableAccessor.SpyingRpcScheduler.html |     8 +-
 ...TableAccessor.SpyingRpcSchedulerFactory.html |     6 +-
 .../hadoop/hbase/TestMetaTableAccessor.html     |    40 +-
 .../TestMetaTableLocator.WaitOnMetaThread.html  |     8 +-
 .../hadoop/hbase/TestMetaTableLocator.html      |   209 +-
 ...ovedRegionsCleaner.TestMockRegionServer.html |     4 +-
 .../hbase/class-use/HBaseClassTestRule.html     |    16 +-
 .../hbase/class-use/HBaseTestingUtility.html    |     4 +
 ...rRPCTimeout.RegionServerWithScanTimeout.html |     4 +-
 ...taCache.RegionServerWithFakeRpcServices.html |     4 +-
 .../hbase/client/TestMetaWithReplicas.html      |    16 +-
 .../hbase/master/MockNoopMasterServices.html    |   241 +-
 .../MockRegionServer.RegionNameAndIndex.html    |    12 +-
 .../hadoop/hbase/master/MockRegionServer.html   |   313 +-
 .../TestActiveMasterManager.DummyMaster.html    |    71 +-
 ...ctiveMasterManager.NodeDeletionListener.html |    14 +-
 ...ctiveMasterManager.WaitToBeMasterThread.html |    12 +-
 .../hbase/master/TestActiveMasterManager.html   |    20 +-
 .../TestCloseAnOpeningRegion.MockHMaster.html   |     6 +-
 .../master/TestGetReplicationLoad.MyMaster.html |     6 +-
 .../master/TestMasterMetrics.MyMaster.html      |     6 +-
 .../TestMasterMetrics.MyRegionServer.html       |     4 +-
 .../TestMetaShutdownHandler.MyRegionServer.html |     4 +-
 .../TestShutdownBackupMaster.MockHMaster.html   |     6 +-
 ...TestSplitLogManager.DummyMasterServices.html |     2 +-
 .../master/assignment/MockMasterServices.html   |     2 +-
 ...dToMultipleRegionServers.HMasterForTest.html |     6 +-
 ...tReportOnlineRegionsRace.HMasterForTest.html |     6 +-
 ...TransitionFromDeadServer.HMasterForTest.html |     6 +-
 ...gionStateTransitionRetry.HMasterForTest.html |     6 +-
 .../cleaner/TestHFileCleaner.DummyServer.html   |    61 +-
 .../hbase/master/cleaner/TestHFileCleaner.html  |    30 +-
 .../TestHFileLinkCleaner.DummyServer.html       |    61 +-
 .../master/cleaner/TestHFileLinkCleaner.html    |    14 +-
 .../cleaner/TestLogsCleaner.DummyServer.html    |    61 +-
 .../TestLogsCleaner.FaultyZooKeeperWatcher.html |    10 +-
 .../hbase/master/cleaner/TestLogsCleaner.html   |    24 +-
 ...TestReplicationHFileCleaner.DummyServer.html |    61 +-
 ...tionHFileCleaner.FaultyZooKeeperWatcher.html |    10 +-
 .../cleaner/TestReplicationHFileCleaner.html    |    38 +-
 ...PeerWorkerWhenRestarting.HMasterForTest.html |     6 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    12 +-
 .../hadoop/hbase/procedure/package-tree.html    |     8 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     2 +-
 .../hbase/regionserver/OOMERegionServer.html    |     4 +-
 ...onInDeadRegionServer.IgnoreYouAreDeadRS.html |     4 +-
 .../TestHeapMemoryManager.BlockCacheStub.html   |    48 +-
 ...HeapMemoryManager.CustomHeapMemoryTuner.html |    14 +-
 ...stHeapMemoryManager.MemstoreFlusherStub.html |    20 +-
 ...emoryManager.RegionServerAccountingStub.html |    18 +-
 .../TestHeapMemoryManager.RegionServerStub.html |    65 +-
 .../regionserver/TestHeapMemoryManager.html     |    46 +-
 ...Initializing.RegisterAndDieRegionServer.html |     4 +-
 ...egionMergeTransactionOnCluster.MyMaster.html |     6 +-
 .../regionserver/TestRegionServerNoMaster.html  |    24 +-
 ...egionServerReportForDuty.MyRegionServer.html |     4 +-
 ...verReportForDuty.NeverInitializedMaster.html |     6 +-
 ...eartbeatMessages.HeartbeatHRegionServer.html |     4 +-
 ...stShutdownWhileWALBroken.MyRegionServer.html |     4 +-
 .../TestSplitLogWorker.DummyServer.html         |    67 +-
 .../hbase/regionserver/TestSplitLogWorker.html  |    52 +-
 .../TestSplitTransactionOnCluster.MyMaster.html |     6 +-
 .../regionserver/TestWALLockup.DodgyFSLog.html  |    14 +-
 .../regionserver/TestWALLockup.DummyServer.html |    67 +-
 .../TestWALLockup.DummyWALActionsListener.html  |     6 +-
 .../hbase/regionserver/TestWALLockup.html       |    36 +-
 .../hadoop/hbase/regionserver/package-tree.html |     4 +-
 ...stReplicationProcedureRetry.MockHMaster.html |     6 +-
 ...nTrackerZKImpl.DummyReplicationListener.html |     6 +-
 ...estReplicationTrackerZKImpl.DummyServer.html |    67 +-
 .../TestReplicationTrackerZKImpl.html           |    34 +-
 ...icationSource.ShutdownDelayRegionServer.html |     4 +-
 ...onSourceManager.DummyNodeFailoverWorker.html |    16 +-
 ...estReplicationSourceManager.DummyServer.html |    65 +-
 ...er.FailInitializeDummyReplicationSource.html |     6 +-
 .../TestReplicationSourceManager.html           |   100 +-
 .../hbase/rsgroup/TestRSGroupsWithACL.html      |     4 +-
 .../hadoop/hbase/rsgroup/TestUtility.html       |   572 +
 .../rsgroup/VerifyingRSGroupAdminClient.html    |     4 +-
 .../hbase/rsgroup/class-use/TestUtility.html    |   125 +
 .../hadoop/hbase/rsgroup/package-frame.html     |     1 +
 .../hadoop/hbase/rsgroup/package-summary.html   |     4 +
 .../hadoop/hbase/rsgroup/package-tree.html      |     1 +
 ...entication.NonShadedBlockingRpcCallback.html |    12 +-
 .../TestTokenAuthentication.TokenServer.html    |   107 +-
 .../security/token/TestTokenAuthentication.html |    28 +-
 .../apache/hadoop/hbase/test/package-tree.html  |     4 +-
 .../apache/hadoop/hbase/util/MockServer.html    |    77 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |     4 +-
 testdevapidocs/overview-tree.html               |     1 +
 .../hadoop/hbase/MockRegionServerServices.html  |   570 +-
 .../hbase/TestMetaTableAccessor.MetaTask.html   |  1288 +-
 ...estMetaTableAccessor.SpyingRpcScheduler.html |  1288 +-
 ...TableAccessor.SpyingRpcSchedulerFactory.html |  1288 +-
 .../hadoop/hbase/TestMetaTableAccessor.html     |  1288 +-
 .../TestMetaTableLocator.WaitOnMetaThread.html  |   537 +-
 .../hadoop/hbase/TestMetaTableLocator.html      |   537 +-
 .../hbase/client/TestMetaWithReplicas.html      |    88 +-
 .../hbase/master/MockNoopMasterServices.html    |   786 +-
 .../MockRegionServer.RegionNameAndIndex.html    |  1116 +-
 .../hadoop/hbase/master/MockRegionServer.html   |  1116 +-
 .../TestActiveMasterManager.DummyMaster.html    |   624 +-
 ...ctiveMasterManager.NodeDeletionListener.html |   624 +-
 ...ctiveMasterManager.WaitToBeMasterThread.html |   624 +-
 .../hbase/master/TestActiveMasterManager.html   |   624 +-
 .../cleaner/TestHFileCleaner.DummyServer.html   |   816 +-
 .../hbase/master/cleaner/TestHFileCleaner.html  |   816 +-
 .../TestHFileLinkCleaner.DummyServer.html       |   346 +-
 .../master/cleaner/TestHFileLinkCleaner.html    |   346 +-
 .../cleaner/TestLogsCleaner.DummyServer.html    |   799 +-
 .../TestLogsCleaner.FaultyZooKeeperWatcher.html |   799 +-
 .../hbase/master/cleaner/TestLogsCleaner.html   |   799 +-
 ...TestReplicationHFileCleaner.DummyServer.html |   530 +-
 ...tionHFileCleaner.FaultyZooKeeperWatcher.html |   530 +-
 .../cleaner/TestReplicationHFileCleaner.html    |   530 +-
 .../TestHeapMemoryManager.BlockCacheStub.html   |  1666 +-
 ...HeapMemoryManager.CustomHeapMemoryTuner.html |  1666 +-
 ...stHeapMemoryManager.MemstoreFlusherStub.html |  1666 +-
 ...emoryManager.RegionServerAccountingStub.html |  1666 +-
 .../TestHeapMemoryManager.RegionServerStub.html |  1666 +-
 .../regionserver/TestHeapMemoryManager.html     |  1666 +-
 .../regionserver/TestRegionServerNoMaster.html  |   391 +-
 .../TestSplitLogWorker.DummyServer.html         |   950 +-
 .../hbase/regionserver/TestSplitLogWorker.html  |   950 +-
 .../regionserver/TestWALLockup.DodgyFSLog.html  |  1002 +-
 .../regionserver/TestWALLockup.DummyServer.html |  1002 +-
 .../TestWALLockup.DummyWALActionsListener.html  |  1002 +-
 .../hbase/regionserver/TestWALLockup.html       |  1002 +-
 ...nTrackerZKImpl.DummyReplicationListener.html |   450 +-
 ...estReplicationTrackerZKImpl.DummyServer.html |   450 +-
 .../TestReplicationTrackerZKImpl.html           |   450 +-
 ...onSourceManager.DummyNodeFailoverWorker.html |  1636 +-
 ...estReplicationSourceManager.DummyServer.html |  1636 +-
 ...er.FailInitializeDummyReplicationSource.html |  1636 +-
 .../TestReplicationSourceManager.html           |  1636 +-
 .../hadoop/hbase/rsgroup/TestUtility.html       |   301 +
 ...entication.NonShadedBlockingRpcCallback.html |   954 +-
 .../TestTokenAuthentication.TokenServer.html    |   954 +-
 .../security/token/TestTokenAuthentication.html |   954 +-
 .../apache/hadoop/hbase/util/MockServer.html    |   259 +-
 315 files changed, 179735 insertions(+), 180638 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index 203b0a1..ab833c2 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -611,7 +611,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 22b95a4..d34c656 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20181203143314+00'00')
-/CreationDate (D:20181203144906+00'00')
+/ModDate (D:20181205143320+00'00')
+/CreationDate (D:20181205144913+00'00')
 >>
 endobj
 2 0 obj

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index 4410536..bc5543e 100644
--- a/book.html
+++ b/book.html
@@ -41318,7 +41318,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2018-12-03 14:33:14 UTC
+Last updated 2018-12-05 14:33:20 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index 7b8d261..c2d64c9 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181203" />
+    <meta name="Date-Revision-yyyymmdd" content="20181205" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -316,7 +316,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-03</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-05</li>
             </p>
                 </div>
 


[14/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for an hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that it was assigned to,<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * This is the default location where hbase-1.x kept the hbck1 lock file.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * hbck puts the lock in place when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy is now created with a custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException, which implies a timeout on these operations of up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do parallelizable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta is always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // skip the pre-check of permissions?<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK

<TRUNCATED>
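
The lock-file helpers added in the listing above are meant to be used together: checkAndMarkRunningHbck() creates the hbck1 lock file under the HBase temp directory and returns the open stream, while unlockHbck() closes the stream and deletes the file. A minimal caller sketch, assuming only the public HBaseFsck methods shown in the listing (the retry values, class name, and cleanup below are illustrative, not the tool's exact defaults):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.FSUtils;
    import org.apache.hadoop.hbase.util.HBaseFsck;
    import org.apache.hadoop.hbase.util.Pair;
    import org.apache.hadoop.hbase.util.RetryCounterFactory;

    public class HbckLockExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Illustrative retry policy: 5 attempts, 200ms between attempts, 5s cap.
        RetryCounterFactory retries = new RetryCounterFactory(5, 200, 5000);
        // Returns the lock path plus an open stream; the stream is null when the
        // lock file already exists (another hbck, or an hbase-2.x Master, holds it).
        Pair<Path, FSDataOutputStream> lock =
            HBaseFsck.checkAndMarkRunningHbck(conf, retries.create());
        if (lock.getSecond() == null) {
          System.err.println("hbck lock " + lock.getFirst() + " is already held; exiting");
          return;
        }
        try {
          // ... perform fsck-style work while holding the exclusive lock ...
        } finally {
          // Mirror unlockHbck(): close the stream, then delete the lock file.
          lock.getSecond().close();
          FileSystem fs = FSUtils.getCurrentFileSystem(conf);
          fs.delete(lock.getFirst(), false);
        }
      }
    }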

[42/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.html b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
index 5549ccc..399a075 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionServerServices.html
@@ -304,7 +304,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/Server.html" title="inte
 <!--   -->
 </a>
 <h3>Methods inherited from interface&nbsp;org.apache.hadoop.hbase.<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getServerName--">getServerName</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getZooKeeper--">getZooKeeper</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getServerName--">getServerName</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getZooKeeper--">getZooKeeper</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.Abortable">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 52bcb5f..39721a5 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -716,20 +716,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 2731576..3bd22b5 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -131,8 +131,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
index 1c700a4..f8907e8 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.103">ReplicationSyncUp.DummyServer</a>
+<pre>class <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.102">ReplicationSyncUp.DummyServer</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></pre>
 </li>
@@ -225,37 +225,30 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#getFileSystem--">getFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i8" class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#getMetaTableLocator--">getMetaTableLocator</a></span>()</code>
-<div class="block">Returns instance of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server.</div>
-</td>
-</tr>
-<tr id="i9" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#getServerName--">getServerName</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#getZooKeeper--">getZooKeeper</a></span>()</code>
 <div class="block">Gets the ZooKeeper instance for this server.</div>
 </td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#isAborted--">isAborted</a></span>()</code>
 <div class="block">Check if the server or client was aborted.</div>
 </td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#isStopped--">isStopped</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#isStopping--">isStopping</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#stop-java.lang.String-">stop</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</code>
 <div class="block">Stop this service.</div>
@@ -289,7 +282,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>hostname</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.104">hostname</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.103">hostname</a></pre>
 </li>
 </ul>
 <a name="zkw">
@@ -298,7 +291,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>zkw</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.105">zkw</a></pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.104">zkw</a></pre>
 </li>
 </ul>
 </li>
@@ -315,7 +308,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>DummyServer</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.107">DummyServer</a>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.106">DummyServer</a>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
 </li>
 </ul>
 <a name="DummyServer-java.lang.String-">
@@ -324,7 +317,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DummyServer</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.113">DummyServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname)</pre>
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.112">DummyServer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;hostname)</pre>
 </li>
 </ul>
 </li>
@@ -341,7 +334,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getConfiguration</h4>
-<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.118">getConfiguration</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.117">getConfiguration</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">Server</a></code></span></div>
 <div class="block">Gets the configuration object for this server.</div>
 <dl>
@@ -356,7 +349,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getZooKeeper</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.123">getZooKeeper</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.122">getZooKeeper</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getZooKeeper--">Server</a></code></span></div>
 <div class="block">Gets the ZooKeeper instance for this server.</div>
 <dl>
@@ -371,7 +364,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getCoordinatedStateManager</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CoordinatedStateManager.html" title="interface in org.apache.hadoop.hbase">CoordinatedStateManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.128">getCoordinatedStateManager</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/CoordinatedStateManager.html" title="interface in org.apache.hadoop.hbase">CoordinatedStateManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.127">getCoordinatedStateManager</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">Server</a></code></span></div>
 <div class="block">Get CoordinatedStateManager instance for this server.</div>
 <dl>
@@ -380,32 +373,13 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 </dl>
 </li>
 </ul>
-<a name="getMetaTableLocator--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>getMetaTableLocator</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.133">getMetaTableLocator</a>()</pre>
-<div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">Server</a></code></span></div>
-<div class="block">Returns instance of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server. This MetaServerLocator is started and stopped by server, clients
- shouldn't manage it's lifecycle.</div>
-<dl>
-<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
-<dd><code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></code></dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>instance of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a> associated with this server.</dd>
-</dl>
-</li>
-</ul>
 <a name="getServerName--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerName</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.138">getServerName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.132">getServerName</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getServerName--">getServerName</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></code></dd>
@@ -420,7 +394,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.143">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.137">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;e)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Abortable.html#abort-java.lang.String-java.lang.Throwable-">Abortable</a></code></span></div>
 <div class="block">Abort the server or client.</div>
@@ -439,7 +413,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>isAborted</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.147">isAborted</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.141">isAborted</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Abortable.html#isAborted--">Abortable</a></code></span></div>
 <div class="block">Check if the server or client was aborted.</div>
 <dl>
@@ -456,7 +430,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.152">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.146">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-">Stoppable</a></code></span></div>
 <div class="block">Stop this service.
  Implementers should favor logging errors over throwing RuntimeExceptions.</div>
@@ -474,7 +448,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>isStopped</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.156">isStopped</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.150">isStopped</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html#isStopped--">isStopped</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></code></dd>
@@ -489,7 +463,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getConnection</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.161">getConnection</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.155">getConnection</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">Server</a></code></span></div>
 <div class="block">Returns a reference to the servers' connection.
 
@@ -507,7 +481,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getChoreService</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase">ChoreService</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.166">getChoreService</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/ChoreService.html" title="class in org.apache.hadoop.hbase">ChoreService</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.160">getChoreService</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></code></dd>
@@ -522,7 +496,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterConnection</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.171">getClusterConnection</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.165">getClusterConnection</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">Server</a></code></span></div>
 <div class="block">Returns a reference to the servers' cluster connection. Prefer <a href="../../../../../../org/apache/hadoop/hbase/Server.html#getConnection--"><code>Server.getConnection()</code></a>.
 
@@ -540,7 +514,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileSystem</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.176">getFileSystem</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.170">getFileSystem</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></code></dd>
@@ -555,7 +529,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>isStopping</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.181">isStopping</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.175">isStopping</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></code></dd>
@@ -570,7 +544,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createConnection</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.186">createConnection</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#line.180">createConnection</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
index 95324b5..b059664 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.52">ReplicationSyncUp</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.51">ReplicationSyncUp</a>
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool</pre>
 <div class="block">In a scenario of Replication based Disaster/Recovery, when hbase Master-Cluster crashes, this
@@ -255,7 +255,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SLEEP_TIME</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.54">SLEEP_TIME</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.53">SLEEP_TIME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.replication.regionserver.ReplicationSyncUp.SLEEP_TIME">Constant Field Values</a></dd>
@@ -276,7 +276,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ReplicationSyncUp</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.52">ReplicationSyncUp</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.51">ReplicationSyncUp</a>()</pre>
 </li>
 </ul>
 </li>
@@ -293,7 +293,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>main</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.59">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.58">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Main program</div>
 <dl>
@@ -308,7 +308,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.65">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.html#line.64">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
index cd3870f..30c4e73 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
@@ -207,8 +207,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactoryImpl.SourceHolder.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">MetricsReplicationSourceFactoryImpl.SourceHolder</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index 034077c..c20ff47 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
index bdd992d..312f07e 100644
--- a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html
@@ -322,7 +322,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createRSGroupTable</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html#line.831">createRSGroupTable</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html#line.830">createRSGroupTable</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -336,7 +336,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isOnline</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html#line.856">isOnline</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.RSGroupStartupWorker.html#line.855">isOnline</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
index 183427a..bfd804e 100644
--- a/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html
@@ -1053,7 +1053,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoMa
 <ul class="blockList">
 <li class="blockList">
 <h4>isMasterRunning</h4>
-<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html#line.861">isMasterRunning</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterServices.html" title="interface in org.apache.hadoop.hbase.master">MasterServices</a>&nbsp;masterServices)</pre>
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html#line.860">isMasterRunning</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterServices.html" title="interface in org.apache.hadoop.hbase.master">MasterServices</a>&nbsp;masterServices)</pre>
 </li>
 </ul>
 <a name="multiMutate-java.util.List-">
@@ -1062,7 +1062,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoMa
 <ul class="blockList">
 <li class="blockList">
 <h4>multiMutate</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html#line.865">multiMutate</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html#line.864">multiMutate</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1076,7 +1076,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/rsgroup/RSGroupInfoMa
 <ul class="blockListLast">
 <li class="blockList">
 <h4>checkGroupName</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html#line.894">checkGroupName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;groupName)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.html#line.893">checkGroupName</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;groupName)
                      throws <a href="../../../../../org/apache/hadoop/hbase/constraint/ConstraintException.html" title="class in org.apache.hadoop.hbase.constraint">ConstraintException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/rsgroup/Utility.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/rsgroup/Utility.html b/devapidocs/org/apache/hadoop/hbase/rsgroup/Utility.html
index 354bfb7..bcdfd05 100644
--- a/devapidocs/org/apache/hadoop/hbase/rsgroup/Utility.html
+++ b/devapidocs/org/apache/hadoop/hbase/rsgroup/Utility.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -75,13 +75,13 @@ var activeTableTab = "activeTableTab";
 <ul class="subNavList">
 <li>Summary:&nbsp;</li>
 <li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
 <li><a href="#method.summary">Method</a></li>
 </ul>
 <ul class="subNavList">
 <li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
 <li><a href="#method.detail">Method</a></li>
 </ul>
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-final class <a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.34">Utility</a>
+final class <a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.61">Utility</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Utility for this RSGroup package in hbase-rsgroup.</div>
 </li>
@@ -119,6 +119,25 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="summary">
 <ul class="blockList">
 <li class="blockList">
+<!-- =========== FIELD SUMMARY =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="field.summary">
+<!--   -->
+</a>
+<h3>Field Summary</h3>
+<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation">
+<caption><span>Fields</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Field and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#LOG">LOG</a></span></code>&nbsp;</td>
+</tr>
+</table>
+</li>
+</ul>
 <!-- ======== CONSTRUCTOR SUMMARY ======== -->
 <ul class="blockList">
 <li class="blockList"><a name="constructor.summary">
@@ -151,9 +170,51 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <th class="colLast" scope="col">Method and Description</th>
 </tr>
 <tr id="i0" class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                   <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)</code>&nbsp;</td>
+</tr>
+<tr id="i1" class="rowColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                       <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                       long&nbsp;timeout,
+                       int&nbsp;replicaId)</code>
+<div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
+ specified timeout for availability.</div>
+</td>
+</tr>
+<tr id="i2" class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/net/Address.html" title="class in org.apache.hadoop.hbase.net">Address</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getOnlineServers-org.apache.hadoop.hbase.master.MasterServices-">getOnlineServers</a></span>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterServices.html" title="interface in org.apache.hadoop.hbase.master">MasterServices</a>&nbsp;master)</code>&nbsp;</td>
 </tr>
+<tr id="i3" class="rowColor">
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
+                        <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                        long&nbsp;timeout)</code>
+<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
+</td>
+</tr>
+<tr id="i4" class="altColor">
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                        <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                        long&nbsp;timeout,
+                        int&nbsp;replicaId)</code>
+<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
+</td>
+</tr>
+<tr id="i5" class="rowColor">
+<td class="colFirst"><code>private static boolean</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
+                    <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
+                    byte[]&nbsp;regionName)</code>
+<div class="block">Verify we can connect to <code>hostingServer</code> and that its carrying
+ <code>regionName</code>.</div>
+</td>
+</tr>
 </table>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
@@ -170,6 +231,23 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="details">
 <ul class="blockList">
 <li class="blockList">
+<!-- ============ FIELD DETAIL =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="field.detail">
+<!--   -->
+</a>
+<h3>Field Detail</h3>
+<a name="LOG">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>LOG</h4>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.63">LOG</a></pre>
+</li>
+</ul>
+</li>
+</ul>
 <!-- ========= CONSTRUCTOR DETAIL ======== -->
 <ul class="blockList">
 <li class="blockList"><a name="constructor.detail">
@@ -182,7 +260,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>Utility</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.35">Utility</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.65">Utility</a>()</pre>
 </li>
 </ul>
 </li>
@@ -196,10 +274,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="getOnlineServers-org.apache.hadoop.hbase.master.MasterServices-">
 <!--   -->
 </a>
-<ul class="blockListLast">
+<ul class="blockList">
 <li class="blockList">
 <h4>getOnlineServers</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/net/Address.html" title="class in org.apache.hadoop.hbase.net">Address</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.42">getOnlineServers</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterServices.html" title="interface in org.apache.hadoop.hbase.master">MasterServices</a>&nbsp;master)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/net/Address.html" title="class in org.apache.hadoop.hbase.net">Address</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.72">getOnlineServers</a>(<a href="../../../../../org/apache/hadoop/hbase/master/MasterServices.html" title="interface in org.apache.hadoop.hbase.master">MasterServices</a>&nbsp;master)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>master</code> - the master to get online servers for</dd>
@@ -208,6 +286,132 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
+<a name="verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>verifyMetaRegionLocation</h4>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.94">verifyMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
+                                               <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                               long&nbsp;timeout)
+                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
+                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>hConnection</code> - the connection to use</dd>
+<dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
+<dd><code>timeout</code> - How long to wait on zk for meta address (passed through to the internal call to
+          <a href="../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-"><code>getMetaServerConnection(org.apache.hadoop.hbase.client.ClusterConnection, org.apache.hadoop.hbase.zookeeper.ZKWatcher, long, int)</code></a>.</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>True if the <code>hbase:meta</code> location is healthy.</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
+</dl>
+</li>
+</ul>
+<a name="verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>verifyMetaRegionLocation</h4>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.109">verifyMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                                               <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                               long&nbsp;timeout,
+                                               int&nbsp;replicaId)
+                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
+                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>connection</code> - the connection to use</dd>
+<dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
+<dd><code>timeout</code> - How long to wait on zk for meta address (passed through to <code>getMetaServerConnection</code>).</dd>
+<dd><code>replicaId</code> - the ID of the replica</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>True if the <code>hbase:meta</code> location is healthy.</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
+</dl>
+</li>
+</ul>
+<a name="verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>verifyRegionLocation</h4>
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.142">verifyRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                                            org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
+                                            <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
+                                            byte[]&nbsp;regionName)</pre>
+<div class="block">Verify we can connect to <code>hostingServer</code> and that it is carrying
+ <code>regionName</code>.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>hostingServer</code> - Interface to the server hosting <code>regionName</code></dd>
+<dd><code>address</code> - The servername that goes with the <code>metaServer</code> interface. Used
+          for logging.</dd>
+<dd><code>regionName</code> - The regionname we are interested in.</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>True if we were able to verify the region located at other side of the interface.</dd>
+</dl>
+</li>
+</ul>
+<a name="getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getMetaServerConnection</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.190">getMetaServerConnection</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                                                                                                                                    <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                                                                                                                    long&nbsp;timeout,
+                                                                                                                                    int&nbsp;replicaId)
+                                                                                                                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
+                                                                                                                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
+ specified timeout for availability.
+ <p>
+ WARNING: Does not retry. Use an <a href="../../../../../org/apache/hadoop/hbase/client/HTable.html" title="class in org.apache.hadoop.hbase.client"><code>HTable</code></a> instead.</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>connection</code> - the connection to use</dd>
+<dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
+<dd><code>timeout</code> - How long to wait on meta location</dd>
+<dd><code>replicaId</code> - the ID of the replica</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>connection to server hosting meta</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
+</dl>
+</li>
+</ul>
+<a name="getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>getCachedConnection</h4>
+<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/rsgroup/Utility.html#line.203">getCachedConnection</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                                                                                                                                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)
+                                                                                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>sn</code> - ServerName to get a connection against.</dd>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>The AdminProtocol we got when we connected to <code>sn</code>. May have come from cache,
+         may not be good, may have been setup by this invocation, or may be null.</dd>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
+</dl>
+</li>
+</ul>
 </li>
 </ul>
 </li>
@@ -262,13 +466,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="subNavList">
 <li>Summary:&nbsp;</li>
 <li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
 <li><a href="#method.summary">Method</a></li>
 </ul>
 <ul class="subNavList">
 <li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
 <li><a href="#method.detail">Method</a></li>
 </ul>

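The Utility methods added in the diff above describe a meta availability check: resolve the hbase:meta location from ZooKeeper, obtain an AdminService connection to the hosting server, and confirm that server is actually carrying the meta region. Below is a minimal usage sketch, assuming a caller in the same org.apache.hadoop.hbase.rsgroup package (the class is package-private and @InterfaceAudience.Private); the MetaCheckSketch wrapper, its method, and its variable names are illustrative only and are not HBase code.

    package org.apache.hadoop.hbase.rsgroup;

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.ClusterConnection;
    import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

    final class MetaCheckSketch {
      private MetaCheckSketch() {
      }

      // Returns true once hbase:meta is deployed and reachable for the default replica,
      // waiting up to timeoutMs on ZooKeeper for the meta location to be published.
      static boolean metaIsHealthy(ClusterConnection conn, ZKWatcher zkw, long timeoutMs)
          throws IOException, InterruptedException {
        // The three-argument overload checks the default replica; the four-argument
        // overload additionally takes a replicaId.
        return Utility.verifyMetaRegionLocation(conn, zkw, timeoutMs);
      }
    }
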
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index f00f533..a2449b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -141,10 +141,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index dfa02b5..67b7e3a 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -191,9 +191,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 4ade4c1..2cef8bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -199,8 +199,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftServerRunner.ImplType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index 5fd4df8..864bbb2 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2114">HBaseFsck.CheckRegionConsistencyWorkItem</a>
+<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2108">HBaseFsck.CheckRegionConsistencyWorkItem</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 </li>
@@ -211,7 +211,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>key</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2115">key</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2109">key</a></pre>
 </li>
 </ul>
 <a name="hbi">
@@ -220,7 +220,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hbi</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2116">hbi</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2110">hbi</a></pre>
 </li>
 </ul>
 </li>
@@ -237,7 +237,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CheckRegionConsistencyWorkItem</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2118">CheckRegionConsistencyWorkItem</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2112">CheckRegionConsistencyWorkItem</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
                                <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)</pre>
 </li>
 </ul>
@@ -255,7 +255,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2124">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2118">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>


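HBaseFsck.CheckRegionConsistencyWorkItem above is a Callable&lt;Void&gt; holding a region key and its HbckInfo; the diff only shifts its line numbers, so the surrounding scheduling code is not shown here. The following is a generic sketch of how such per-region work items are typically submitted and joined; every name other than the java.util.concurrent types is invented for illustration, and this is not the HBaseFsck implementation.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    final class RegionCheckSketch {
      // Submits one Callable per region key and waits for all of them,
      // surfacing the first per-region failure as an exception.
      static void checkAll(Map<String, Object> regionsByKey) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        try {
          List<Future<Void>> results = new ArrayList<>();
          for (Map.Entry<String, Object> entry : regionsByKey.entrySet()) {
            final String key = entry.getKey();           // analogous to the 'key' field above
            final Object regionInfo = entry.getValue();  // stands in for HBaseFsck.HbckInfo
            results.add(pool.submit(new Callable<Void>() {
              @Override
              public Void call() throws Exception {
                // the per-region consistency check for 'key' / 'regionInfo' would run here
                return null;
              }
            }));
          }
          for (Future<Void> f : results) {
            f.get();
          }
        } finally {
          pool.shutdown();
        }
      }
    }
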
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
index 6c0477a..f14d23f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.314">MasterRpcServices</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.315">MasterRpcServices</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a>
 implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService.BlockingInterface, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface, org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService.BlockingInterface, org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService.BlockingInterface</pre>
 <div class="block">Implements the master RPC services.</div>
@@ -851,7 +851,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.317">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.318">LOG</a></pre>
 </li>
 </ul>
 <a name="master">
@@ -860,7 +860,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockListLast">
 <li class="blockList">
 <h4>master</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.319">master</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.320">master</a></pre>
 </li>
 </ul>
 </li>
@@ -877,7 +877,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MasterRpcServices</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.341">MasterRpcServices</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;m)
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.342">MasterRpcServices</a>(<a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" title="class in org.apache.hadoop.hbase.master">HMaster</a>&nbsp;m)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -899,7 +899,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>createConfigurationSubset</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.325">createConfigurationSubset</a>()</pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.326">createConfigurationSubset</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Subset of configuration to pass initializing regionservers: e.g.
@@ -913,7 +913,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>addConfig</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.332">addConfig</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder&nbsp;resp,
+<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.333">addConfig</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder&nbsp;resp,
                                                                                                                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key)</pre>
 </li>
 </ul>
@@ -923,7 +923,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getRpcSchedulerFactoryClass</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.347">getRpcSchedulerFactoryClass</a>()</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.348">getRpcSchedulerFactoryClass</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html#getRpcSchedulerFactoryClass--">getRpcSchedulerFactoryClass</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a></code></dd>
@@ -936,7 +936,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>createRpcServer</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServerInterface.html" title="interface in org.apache.hadoop.hbase.ipc">RpcServerInterface</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.357">createRpcServer</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;server,
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServerInterface.html" title="interface in org.apache.hadoop.hbase.ipc">RpcServerInterface</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.358">createRpcServer</a>(<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;server,
                                              org.apache.hadoop.conf.Configuration&nbsp;conf,
                                              <a href="../../../../../org/apache/hadoop/hbase/regionserver/RpcSchedulerFactory.html" title="interface in org.apache.hadoop.hbase.regionserver">RpcSchedulerFactory</a>&nbsp;rpcSchedulerFactory,
                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>&nbsp;bindAddress,
@@ -956,7 +956,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>createPriority</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ipc/PriorityFunction.html" title="interface in org.apache.hadoop.hbase.ipc">PriorityFunction</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.375">createPriority</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ipc/PriorityFunction.html" title="interface in org.apache.hadoop.hbase.ipc">PriorityFunction</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.376">createPriority</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html#createPriority--">createPriority</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a></code></dd>
@@ -969,7 +969,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>rpcPreCheck</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.388">rpcPreCheck</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;requestName)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.389">rpcPreCheck</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;requestName)
                   throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Checks for the following pre-checks in order:
  <ol>
@@ -990,7 +990,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>switchBalancer</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.408">switchBalancer</a>(boolean&nbsp;b,
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.409">switchBalancer</a>(boolean&nbsp;b,
                        <a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master">MasterRpcServices.BalanceSwitchMode</a>&nbsp;mode)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Assigns balancer switch according to BalanceSwitchMode</div>
@@ -1011,7 +1011,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>synchronousBalanceSwitch</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.437">synchronousBalanceSwitch</a>(boolean&nbsp;b)
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.438">synchronousBalanceSwitch</a>(boolean&nbsp;b)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1025,7 +1025,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getServices</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html" title="class in org.apache.hadoop.hbase.ipc">RpcServer.BlockingServiceAndInterface</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.445">getServices</a>()</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html" title="class in org.apache.hadoop.hbase.ipc">RpcServer.BlockingServiceAndInterface</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.446">getServices</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html#getServices--">RSRpcServices</a></code></span></div>
 <div class="block">By default, put up an Admin and a Client Service.
  Set booleans <code>hbase.regionserver.admin.executorService</code> and
@@ -1045,7 +1045,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastFlushedSequenceId</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.463">getLastFlushedSequenceId</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.464">getLastFlushedSequenceId</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest&nbsp;request)
                                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1062,7 +1062,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>regionServerReport</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.477">regionServerReport</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.478">regionServerReport</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest&nbsp;request)
                                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1079,7 +1079,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>regionServerStartup</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.508">regionServerStartup</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.509">regionServerStartup</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest&nbsp;request)
                                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1096,7 +1096,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>reportRSFatalError</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.539">reportRSFatalError</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.540">reportRSFatalError</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest&nbsp;request)
                                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1113,7 +1113,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>addColumn</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.550">addColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.551">addColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest&nbsp;req)
                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1130,7 +1130,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>assignRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.570">assignRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.571">assignRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest&nbsp;req)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1147,7 +1147,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>balance</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.602">balance</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.603">balance</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest&nbsp;request)
                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1164,7 +1164,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>createNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.613">createNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.614">createNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1181,7 +1181,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>createTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.627">createTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.628">createTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest&nbsp;req)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1198,7 +1198,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteColumn</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.643">deleteColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.644">deleteColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest&nbsp;req)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1215,7 +1215,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.663">deleteNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.664">deleteNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1232,7 +1232,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteSnapshot</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.684">deleteSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.685">deleteSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest&nbsp;request)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Execute Delete Snapshot operation.</div>
@@ -1254,7 +1254,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.699">deleteTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.700">deleteTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest&nbsp;request)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1271,7 +1271,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>truncateTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.711">truncateTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.712">truncateTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1288,7 +1288,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>disableTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.726">disableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.727">disableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest&nbsp;request)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1305,7 +1305,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>enableCatalogJanitor</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.740">enableCatalogJanitor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.741">enableCatalogJanitor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest&nbsp;req)
                                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1322,7 +1322,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setCleanerChoreRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.748">setCleanerChoreRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.749">setCleanerChoreRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest&nbsp;req)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1339,7 +1339,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>enableTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.760">enableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.761">enableTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest&nbsp;request)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1356,7 +1356,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>mergeTableRegions</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.774">mergeTableRegions</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.775">mergeTableRegions</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest&nbsp;request)
                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1373,7 +1373,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>splitRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.814">splitRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.815">splitRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                            org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest&nbsp;request)
                                                                                                     throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1390,7 +1390,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>execMasterService</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.829">execMasterService</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.830">execMasterService</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                    org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest&nbsp;request)
                                                                                                             throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1407,7 +1407,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>execProcedure</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.873">execProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.874">execProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Triggers an asynchronous attempt to run a distributed procedure.
@@ -1426,7 +1426,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>execProcedureWithRet</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.905">execProcedureWithRet</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.906">execProcedureWithRet</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                  org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest&nbsp;request)
                                                                                                           throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Triggers a synchronous attempt to run a distributed procedure and sets
@@ -1446,7 +1446,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterStatus</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.929">getClusterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.930">getClusterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest&nbsp;req)
                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1463,7 +1463,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompletedSnapshots</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.946">getCompletedSnapshots</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.947">getCompletedSnapshots</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest&nbsp;request)
                                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">List the currently available/stored snapshots. Any in-progress snapshots are ignored</div>
@@ -1481,7 +1481,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getNamespaceDescriptor</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.964">getNamespaceDescriptor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.965">getNamespaceDescriptor</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest&nbsp;request)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1498,7 +1498,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getSchemaAlterStatus</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.986">getSchemaAlterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.987">getSchemaAlterStatus</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest&nbsp;req)
                                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Get the number of regions of the table that have been updated by the alter.</div>
@@ -1520,7 +1520,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableDescriptors</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1015">getTableDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1016">getTableDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest&nbsp;req)
                                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Get list of TableDescriptors for requested tables.</div>
@@ -1544,7 +1544,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableNames</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1054">getTableNames</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1055">getTableNames</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest&nbsp;req)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Get list of userspace table names</div>
@@ -1567,7 +1567,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableState</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1078">getTableState</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1079">getTableState</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1584,7 +1584,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isCatalogJanitorEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1093">isCatalogJanitorEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1094">isCatalogJanitorEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest&nbsp;req)
                                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1601,7 +1601,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isCleanerChoreEnabled</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1100">isCleanerChoreEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1101">isCleanerChoreEnabled</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest&nbsp;req)
                                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1618,7 +1618,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isMasterRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1108">isMasterRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1109">isMasterRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest&nbsp;req)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1635,7 +1635,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isProcedureDone</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1125">isProcedureDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1126">isProcedureDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Checks if the specified procedure is done.</div>
@@ -1655,7 +1655,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isSnapshotDone</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1159">isSnapshotDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1160">isSnapshotDone</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest&nbsp;request)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Checks if the specified snapshot is done.</div>
@@ -1677,7 +1677,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getProcedureResult</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1177">getProcedureResult</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1178">getProcedureResult</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest&nbsp;request)
                                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1694,7 +1694,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>abortProcedure</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1214">abortProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1215">abortProcedure</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest&nbsp;request)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1711,7 +1711,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>listNamespaceDescriptors</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1228">listNamespaceDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1229">listNamespaceDescriptors</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest&nbsp;request)
                                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1728,7 +1728,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getProcedures</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1243">getProcedures</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1244">getProcedures</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;rpcController,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1745,7 +1745,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLocks</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1258">getLocks</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1259">getLocks</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest&nbsp;request)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1762,7 +1762,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>listTableDescriptorsByNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1275">listTableDescriptorsByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1276">listTableDescriptorsByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest&nbsp;request)
                                                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1779,7 +1779,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>listTableNamesByNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1291">listTableNamesByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1292">listTableNamesByNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest&nbsp;request)
                                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1796,7 +1796,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>modifyColumn</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1306">modifyColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1307">modifyColumn</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                         org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest&nbsp;req)
                                                                                                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1813,7 +1813,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>modifyNamespace</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1326">modifyNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1327">modifyNamespace</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1830,7 +1830,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>modifyTable</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1340">modifyTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1341">modifyTable</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                       org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest&nbsp;req)
                                                                                                throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1847,7 +1847,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>moveRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1355">moveRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1356">moveRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest&nbsp;req)
                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1864,7 +1864,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>offlineRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1385">offlineRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1386">offlineRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                           org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest&nbsp;request)
                                                                                                    throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Offline specified region from master's in-memory state. It will not attempt to
@@ -1885,7 +1885,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreSnapshot</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1428">restoreSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1429">restoreSnapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest&nbsp;request)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Execute Restore/Clone snapshot operation.
@@ -1913,7 +1913,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>runCatalogScan</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1442">runCatalogScan</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1443">runCatalogScan</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest&nbsp;req)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1930,7 +1930,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>runCleanerChore</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1453">runCleanerChore</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1454">runCleanerChore</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                               org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest&nbsp;req)
                                                                                                        throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1947,7 +1947,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setBalancerRunning</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1461">setBalancerRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1462">setBalancerRunning</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest&nbsp;req)
                                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1964,7 +1964,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdown</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1474">shutdown</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1475">shutdown</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest&nbsp;request)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -1981,7 +1981,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>snapshot</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1491">snapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1492">snapshot</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest&nbsp;request)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Triggers an asynchronous attempt to take a snapshot.
@@ -2000,7 +2000,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>stopMaster</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1516">stopMaster</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1517">stopMaster</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                     org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest&nbsp;request)
                                                                                              throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2017,7 +2017,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>isMasterInMaintenanceMode</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1529">isMasterInMaintenanceMode</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1530">isMasterInMaintenanceMode</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest&nbsp;request)
                                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2034,7 +2034,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>unassignRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1538">unassignRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1539">unassignRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                             org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest&nbsp;req)
                                                                                                      throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2051,7 +2051,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>reportRegionStateTransition</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1579">reportRegionStateTransition</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1580">reportRegionStateTransition</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest&nbsp;req)
                                                                                                                                            throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2068,7 +2068,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>setQuota</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1590">setQuota</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1591">setQuota</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;c,
                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest&nbsp;req)
                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2085,7 +2085,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastMajorCompactionTimestamp</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1601">getLastMajorCompactionTimestamp</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1602">getLastMajorCompactionTimestamp</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                        org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest&nbsp;request)
                                                                                                                                 throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2102,7 +2102,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastMajorCompactionTimestampForRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1616">getLastMajorCompactionTimestampForRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1617">getLastMajorCompactionTimestampForRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                                                                 org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest&nbsp;request)
                                                                                                                                          throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <dl>
@@ -2119,7 +2119,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>compactRegion</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1640">compactRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1641">compactRegion</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
                                                                                                          org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest&nbsp;request)
                                                                                                   throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Compact a region on the master.</div>
@@ -2142,7 +2142,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHFileFormatVersionForMob</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1662">checkHFileFormatVersionForMob</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1663">checkHFileFormatVersionForMob</a>()
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">check configured hfile format version before to do compaction</div>
 <dl>
@@ -2157,7 +2157,7 @@ implements org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.Master
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1674">getRegionInfo</a>(org.apache.hbase.thirdparty.com.google.protobuf.RpcController&nbsp;controller,
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html#line.1675">getRegionInfo</a

<TRUNCATED>
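
For context, the Master RPC endpoints listed in the Javadoc above (snapshot, restoreSnapshot, setBalancerRunning, and so on) are not normally invoked against MasterRpcServices directly; client code reaches them through the public Admin API. A minimal sketch, assuming an HBase 2.x client with hbase-site.xml on the classpath and a hypothetical table "t1" and snapshot name "snap1":

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AdminSnapshotSketch {
  public static void main(String[] args) throws Exception {
    // Connection and Admin are Closeable; try-with-resources releases them.
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      TableName table = TableName.valueOf("t1");   // hypothetical table name
      admin.snapshot("snap1", table);              // asks the master to take a snapshot
      admin.disableTable(table);                   // restore requires the table to be disabled
      admin.restoreSnapshot("snap1");              // master-side restore/clone operation
      admin.enableTable(table);                    // bring the table back online
    }
  }
}
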

[28/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index ea05301..26a93dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -269,3590 +269,3574 @@
 <span class="sourceLineNo">261</span>   */<a name="line.261"></a>
 <span class="sourceLineNo">262</span>  protected ClusterConnection clusterConnection;<a name="line.262"></a>
 <span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /*<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Long-living meta table locator, which is created when the server is started and stopped<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * when server shuts down. References to this locator shall be used to perform according<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * operations in EventHandlers. Primary reason for this decision is to make it mockable<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * for tests.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  protected MetaTableLocator metaTableLocator;<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * Go here to get table descriptors.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  protected TableDescriptors tableDescriptors;<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>  // Replication services. If no replication, this handler will be null.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  // Compactions<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public CompactSplit compactSplitThread;<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Map of regions currently being served by this region server. Key is the<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * encoded region name.  All access should be synchronized.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /**<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * and here we really mean DataNode locations.<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  // Leases<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  protected Leases leases;<a name="line.303"></a>
+<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * Go here to get table descriptors.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  protected TableDescriptors tableDescriptors;<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>  // Replication services. If no replication, this handler will be null.<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  // Compactions<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public CompactSplit compactSplitThread;<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * Map of regions currently being served by this region server. Key is the<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * encoded region name.  All access should be synchronized.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.280"></a>
+<span class="sourceLineNo">281</span><a name="line.281"></a>
+<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
+<span class="sourceLineNo">283</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * and here we really mean DataNode locations.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   */<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  // Leases<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  protected Leases leases;<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>  // Instance of the hbase executor executorService.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  protected ExecutorService executorService;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  // If false, the file system has become unavailable<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  protected volatile boolean fsOk;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  protected HFileSystem fs;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected HFileSystem walFs;<a name="line.303"></a>
 <span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>  // Instance of the hbase executor executorService.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  protected ExecutorService executorService;<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  // If false, the file system has become unavailable<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  protected volatile boolean fsOk;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  protected HFileSystem fs;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  protected HFileSystem walFs;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  // Set when a report to the master comes back with a message asking us to<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  // of HRegionServer in isolation.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private volatile boolean stopped = false;<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // debugging and unit tests.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private volatile boolean abortRequested;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  // Default abort timeout is 1200 seconds for safe<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  // Will run this task when abort timeout<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.325"></a>
+<span class="sourceLineNo">305</span>  // Set when a report to the master comes back with a message asking us to<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  // of HRegionServer in isolation.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private volatile boolean stopped = false;<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  // debugging and unit tests.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private volatile boolean abortRequested;<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  // Default abort timeout is 1200 seconds for safe<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  // Will run this task when abort timeout<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  // space regions.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  private boolean stopping = false;<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  volatile boolean killed = false;<a name="line.325"></a>
 <span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.327"></a>
+<span class="sourceLineNo">327</span>  private volatile boolean shutDown = false;<a name="line.327"></a>
 <span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  // space regions.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private boolean stopping = false;<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  volatile boolean killed = false;<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  private volatile boolean shutDown = false;<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>  protected final Configuration conf;<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Path rootDir;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Path walRootDir;<a name="line.340"></a>
+<span class="sourceLineNo">329</span>  protected final Configuration conf;<a name="line.329"></a>
+<span class="sourceLineNo">330</span><a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private Path rootDir;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private Path walRootDir;<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  final int numRetries;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  protected final int threadWakeFrequency;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  protected final int msgInterval;<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  protected final int numRegionsToReport;<a name="line.340"></a>
 <span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  final int numRetries;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  protected final int threadWakeFrequency;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected final int msgInterval;<a name="line.346"></a>
+<span class="sourceLineNo">342</span>  // Stub to do region server status calls against the master.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  RpcClient rpcClient;<a name="line.346"></a>
 <span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  protected final int numRegionsToReport;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  // Stub to do region server status calls against the master.<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  RpcClient rpcClient;<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.357"></a>
+<span class="sourceLineNo">348</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  // into web context.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  protected InfoServer infoServer;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  private JvmPauseMonitor pauseMonitor;<a name="line.357"></a>
 <span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  // into web context.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  protected InfoServer infoServer;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  private JvmPauseMonitor pauseMonitor;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** region server process name */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String REGIONSERVER = "regionserver";<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  MetricsRegionServer metricsRegionServer;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  MetricsTable metricsTable;<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private SpanReceiverHost spanReceiverHost;<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private ChoreService choreService;<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /*<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Check for compactions requests.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  ScheduledChore compactionChecker;<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /*<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * Check for flushes<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  ScheduledChore periodicFlusher;<a name="line.387"></a>
-<span class="sourceLineNo">388</span><a name="line.388"></a>
-<span class="sourceLineNo">389</span>  protected volatile WALFactory walFactory;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // WAL roller. log is protected rather than private to avoid<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // eclipse warning when accessed by inner classes<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected LogRoller walRoller;<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // A thread which calls reportProcedureDone<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // flag set after we're done setting up server threads<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // zookeeper connection and watcher<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  protected final ZKWatcher zooKeeper;<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // master address tracker<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // Cluster Status Tracker<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  // Log Splitting Worker<a name="line.410"></a>
-<span class="sourceLineNo">411</span>  private SplitLogWorker splitLogWorker;<a name="line.411"></a>
+<span class="sourceLineNo">359</span>  /** region server process name */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public static final String REGIONSERVER = "regionserver";<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  MetricsRegionServer metricsRegionServer;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  MetricsTable metricsTable;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>  private SpanReceiverHost spanReceiverHost;<a name="line.364"></a>
+<span class="sourceLineNo">365</span><a name="line.365"></a>
+<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private ChoreService choreService;<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /*<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Check for compactions requests.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  ScheduledChore compactionChecker;<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  /*<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * Check for flushes<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  ScheduledChore periodicFlusher;<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  protected volatile WALFactory walFactory;<a name="line.381"></a>
+<span class="sourceLineNo">382</span><a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // WAL roller. log is protected rather than private to avoid<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // eclipse warning when accessed by inner classes<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected LogRoller walRoller;<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  // A thread which calls reportProcedureDone<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.388"></a>
+<span class="sourceLineNo">389</span><a name="line.389"></a>
+<span class="sourceLineNo">390</span>  // flag set after we're done setting up server threads<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // zookeeper connection and watcher<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  protected final ZKWatcher zooKeeper;<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // master address tracker<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span>  // Cluster Status Tracker<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // Log Splitting Worker<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  private SplitLogWorker splitLogWorker;<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // A sleeper that sleeps for msgInterval.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  protected final Sleeper sleeper;<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private final int operationTimeout;<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  private final int shortOperationTimeout;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.411"></a>
 <span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>  // A sleeper that sleeps for msgInterval.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  protected final Sleeper sleeper;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  private final int operationTimeout;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  private final int shortOperationTimeout;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.419"></a>
+<span class="sourceLineNo">413</span>  // Cache configuration and block cache reference<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  protected CacheConfig cacheConfig;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  // Cache configuration for mob<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  final MobCacheConfig mobCacheConfig;<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** The health check chore. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  private HealthCheckChore healthCheckChore;<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  // Cache configuration and block cache reference<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  protected CacheConfig cacheConfig;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  // Cache configuration for mob<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  final MobCacheConfig mobCacheConfig;<a name="line.424"></a>
+<span class="sourceLineNo">421</span>  /** The nonce manager chore. */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  private ScheduledChore nonceManagerChore;<a name="line.422"></a>
+<span class="sourceLineNo">423</span><a name="line.423"></a>
+<span class="sourceLineNo">424</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.424"></a>
 <span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>  /** The health check chore. */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  private HealthCheckChore healthCheckChore;<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>  /** The nonce manager chore. */<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  private ScheduledChore nonceManagerChore;<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * against  Master.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected ServerName serverName;<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /*<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * hostname specified by hostname config<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  protected String useThisHostnameInstead;<a name="line.444"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * against  Master.<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   */<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  protected ServerName serverName;<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /*<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * hostname specified by hostname config<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  protected String useThisHostnameInstead;<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  // key to the config parameter of server hostname<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  // both master and region server<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.441"></a>
+<span class="sourceLineNo">442</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.444"></a>
 <span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  // key to the config parameter of server hostname<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  // both master and region server<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  // Exception will be thrown if both are used.<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * This servers startcode.<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  protected final long startcode;<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * Unique identifier for the cluster we are a part of.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  protected String clusterId;<a name="line.467"></a>
+<span class="sourceLineNo">446</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  // Exception will be thrown if both are used.<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * This servers startcode.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  protected final long startcode;<a name="line.454"></a>
+<span class="sourceLineNo">455</span><a name="line.455"></a>
+<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
+<span class="sourceLineNo">457</span>   * Unique identifier for the cluster we are a part of.<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   */<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  protected String clusterId;<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  /**<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * Chore to clean periodically the moved region list<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>  // chore for refreshing store files for secondary regions<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  private StorefileRefresherChore storefileRefresher;<a name="line.467"></a>
 <span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Chore to clean periodically the moved region list<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  // chore for refreshing store files for secondary regions<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  private StorefileRefresherChore storefileRefresher;<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  private RegionServerCoprocessorHost rsHost;<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>  /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * HBASE-3787) are:<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   */<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  final ServerNonceManager nonceManager;<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>  private UserProvider userProvider;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected final RSRpcServices rpcServices;<a name="line.506"></a>
+<span class="sourceLineNo">469</span>  private RegionServerCoprocessorHost rsHost;<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /**<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   * HBASE-3787) are:<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  final ServerNonceManager nonceManager;<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  private UserProvider userProvider;<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>  protected final RSRpcServices rpcServices;<a name="line.498"></a>
+<span class="sourceLineNo">499</span><a name="line.499"></a>
+<span class="sourceLineNo">500</span>  protected CoordinatedStateManager csm;<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>  /**<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  protected final ConfigurationManager configurationManager;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  protected CoordinatedStateManager csm;<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   */<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  protected final ConfigurationManager configurationManager;<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.517"></a>
+<span class="sourceLineNo">508</span>  @VisibleForTesting<a name="line.508"></a>
+<span class="sourceLineNo">509</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.509"></a>
+<span class="sourceLineNo">510</span><a name="line.510"></a>
+<span class="sourceLineNo">511</span>  private volatile ThroughputController flushThroughputController;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.513"></a>
+<span class="sourceLineNo">514</span><a name="line.514"></a>
+<span class="sourceLineNo">515</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.517"></a>
 <span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  private volatile ThroughputController flushThroughputController;<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   */<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private final boolean masterless;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  /**<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * Starts a HRegionServer at the default location<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  // Don't start any services or managers in here in the Constructor.<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    super("RegionServer");  // thread name<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TraceUtil.initTracer(conf);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    try {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      this.startcode = System.currentTimeMillis();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      this.conf = conf;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.fsOk = true;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      HFile.checkHFileVersion(this.conf);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      checkCodecs(this.conf);<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.554"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  private final boolean masterless;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>   * Starts a HRegionServer at the default location<a name="line.529"></a>
+<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  // Don't start any services or managers in here in the Constructor.<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    super("RegionServer");  // thread name<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    TraceUtil.initTracer(conf);<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    try {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      this.startcode = System.currentTimeMillis();<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      this.conf = conf;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      this.fsOk = true;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      HFile.checkHFileVersion(this.conf);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      checkCodecs(this.conf);<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.546"></a>
+<span class="sourceLineNo">547</span><a name="line.547"></a>
+<span class="sourceLineNo">548</span>      // Disable usage of meta replicas in the regionserver<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      // Config'ed params<a name="line.550"></a>
+<span class="sourceLineNo">551</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.553"></a>
+<span class="sourceLineNo">554</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.554"></a>
 <span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>      // Disable usage of meta replicas in the regionserver<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      // Config'ed params<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.564"></a>
+<span class="sourceLineNo">556</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.564"></a>
 <span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.567"></a>
+<span class="sourceLineNo">566</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.566"></a>
+<span class="sourceLineNo">567</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.567"></a>
 <span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.571"></a>
-<span class="sourceLineNo">572</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.575"></a>
-<span class="sourceLineNo">576</span><a name="line.576"></a>
-<span class="sourceLineNo">577</span>      this.abortRequested = false;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      this.stopped = false;<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      rpcServices = createRpcServices();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String hostName =<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.583"></a>
-<span class="sourceLineNo">584</span>              : this.useThisHostnameInstead;<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // login the zookeeper client principal (if using security)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      // login the server principal (if using secure Hadoop)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      login(userProvider, hostName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
-<span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
-<span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
-<span class="sourceLineNo">608</span><a name="line.608"></a>
-<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
-<span class="sourceLineNo">633</span><a name="line.633"></a>
-<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
-<span class="sourceLineNo">661</span><a name="line.661"></a>
-<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span><a name="line.733"></a>
-<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
-<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">569</span>      this.abortRequested = false;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      this.stopped = false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span><a name="line.571"></a>
+<span class="sourceLineNo">572</span>      rpcServices = createRpcServices();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      String hostName =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.575"></a>
+<span class="sourceLineNo">576</span>              : this.useThisHostnameInstead;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.579"></a>
+<span class="sourceLineNo">580</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>      // login the zookeeper client principal (if using security)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.583"></a>
+<span class="sourceLineNo">584</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>      // login the server principal (if using secure Hadoop)<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      login(userProvider, hostName);<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      // init superusers and add the server principal (if using security)<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      // or process owner as default super user.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      Superusers.initialize(conf);<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.590"></a>
+<span class="sourceLineNo">591</span><a name="line.591"></a>
+<span class="sourceLineNo">592</span>      boolean isMasterNotCarryTable =<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      // no need to instantiate global block cache when master not carry table<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      if (!isMasterNotCarryTable) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      cacheConfig = new CacheConfig(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.599"></a>
+<span class="sourceLineNo">600</span><a name="line.600"></a>
+<span class="sourceLineNo">601</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        @Override<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      };<a name="line.606"></a>
+<span class="sourceLineNo">607</span><a name="line.607"></a>
+<span class="sourceLineNo">608</span>      initializeFileSystem();<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.609"></a>
+<span class="sourceLineNo">610</span><a name="line.610"></a>
+<span class="sourceLineNo">611</span>      this.configurationManager = new ConfigurationManager();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.612"></a>
+<span class="sourceLineNo">613</span><a name="line.613"></a>
+<span class="sourceLineNo">614</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        // Open connection to zookeeper and set primary watcher<a name="line.616"></a>
+<span class="sourceLineNo">617</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        if (!this.masterless) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          masterAddressTracker.start();<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          clusterStatusTracker.start();<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        } else {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          masterAddressTracker = null;<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          clusterStatusTracker = null;<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        }<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        zooKeeper = null;<a name="line.633"></a>
+<span class="sourceLineNo">634</span>        masterAddressTracker = null;<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        clusterStatusTracker = null;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      this.rpcServices.start(zooKeeper);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      // class HRS. TODO.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      this.choreService = new ChoreService(getName(), true);<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      this.executorService = new ExecutorService(getName());<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      putUpWebUI();<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    } catch (Throwable t) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // cause of failed startup is lost.<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      LOG.error("Failed construction RegionServer", t);<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      throw t;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
+<span class="sourceLineNo">653</span><a name="line.653"></a>
+<span class="sourceLineNo">654</span>  // HMaster should override this method to load the specific config for master<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.659"></a>
+<span class="sourceLineNo">660</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.660"></a>
+<span class="sourceLineNo">661</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.661"></a>
+<span class="sourceLineNo">662</span>        throw new IOException(msg);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>      } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>        return rpcServices.isa.getHostName();<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      return hostname;<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
+<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
+<span class="sourceLineNo">670</span><a name="line.670"></a>
+<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>   * If running on Windows, do windows-specific setup.<a name="line.672"></a>
+<span class="sourceLineNo">673</span>   */<a name="line.673"></a>
+<span class="sourceLineNo">674</span>  private static void

<TRUNCATED>
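
The getUseThisHostnameInstead() method visible in the diff above enforces HBASE-18226: "hbase.regionserver.hostname" and "hbase.regionserver.hostname.disable.master.reversedns" are mutually exclusive, and when the reverse-DNS-disable flag is set the region server reports the hostname of its RPC endpoint rather than any configured value. Below is a minimal standalone sketch of that rule only, assuming a plain Hadoop Configuration; the HostnameConfigCheck class and its chooseHostname() helper are illustrative and not part of the HBase code base.

import java.io.IOException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;

public final class HostnameConfigCheck {
  // Same config keys as the RS_HOSTNAME_KEY / RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY
  // constants shown in the diff above.
  static final String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";
  static final String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =
      "hbase.regionserver.hostname.disable.master.reversedns";

  // rpcHostName stands in for rpcServices.isa.getHostName() in the real server.
  static String chooseHostname(Configuration conf, String rpcHostName) throws IOException {
    String hostname = conf.get(RS_HOSTNAME_KEY);
    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {
      if (!StringUtils.isBlank(hostname)) {
        // Both settings supplied: reject, mirroring the IOException thrown above.
        throw new IOException(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and "
            + RS_HOSTNAME_KEY + " are mutually exclusive; do not set both.");
      }
      return rpcHostName;  // use the RPC endpoint's hostname, skip master-side reverse DNS
    }
    return hostname;       // may be null/blank; the caller then falls back to rpcHostName
  }
}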

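The nonce-manager comment in the diff above explains why increments and appends carry client-supplied nonces: if a client times out and retries an operation that actually succeeded, the server recognizes the nonce and does not apply it a second time. A much-simplified sketch of that idea follows; it is not the real ServerNonceManager API, and the NonceExample class, its increment() method, and the in-memory map are hypothetical (the real manager also expires nonces after a timeout and recovers them from the WAL, as the comment describes).

import java.util.HashMap;
import java.util.Map;

public final class NonceExample {
  // Remembers the result of each successfully applied nonce.
  private final Map<Long, Long> applied = new HashMap<>();
  private long counter = 0;

  // Applies the increment once per nonce; a retry with the same nonce returns the prior result.
  public synchronized long increment(long nonce, long delta) {
    Long prior = applied.get(nonce);
    if (prior != null) {
      return prior;              // duplicate retry: already applied, do not increment again
    }
    counter += delta;
    applied.put(nonce, counter);
    return counter;
  }

  public static void main(String[] args) {
    NonceExample c = new NonceExample();
    long nonce = 42L;                          // normally generated by the client per operation
    System.out.println(c.increment(nonce, 5)); // 5
    System.out.println(c.increment(nonce, 5)); // still 5: same nonce, treated as a retry
  }
}
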
[06/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525

<TRUNCATED>

[08/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>     

<TRUNCATED>
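
The hunk above ends inside hbck's lock-file handling: createFileWithRetries() keeps calling
FSUtils.create() on the hbase-hbck.lock path, sleeping between attempts until the RetryCounter
is exhausted, then rethrows the last IOException. Below is a minimal, self-contained sketch of
that retry shape; it uses plain java.nio instead of the Hadoop FileSystem, and the class name
and the MAX_ATTEMPTS / SLEEP_BETWEEN_ATTEMPTS_MS constants are hypothetical stand-ins for the
hbase.hbck.lockfile.* settings.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class LockFileRetrySketch {
      // Hypothetical stand-ins for the hbase.hbck.lockfile.attempts / sleep-interval settings.
      private static final int MAX_ATTEMPTS = 5;
      private static final long SLEEP_BETWEEN_ATTEMPTS_MS = 200;

      /** Try to create the lock file, sleeping and retrying; rethrow the last failure. */
      static Path createWithRetries(Path lockFile) throws IOException, InterruptedException {
        IOException lastFailure = null;
        for (int attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
          try {
            // Fails (FileAlreadyExistsException) if another hbck instance holds the lock.
            return Files.createFile(lockFile);
          } catch (IOException ioe) {
            lastFailure = ioe;
            System.out.println("Failed to create " + lockFile + ", try=" + attempt
                + " of " + MAX_ATTEMPTS);
            Thread.sleep(SLEEP_BETWEEN_ATTEMPTS_MS);
          }
        }
        throw lastFailure;  // every attempt failed; surface the last IOException
      }

      public static void main(String[] args) throws Exception {
        System.out.println("Created " + createWithRetries(Paths.get("/tmp/hbase-hbck.lock")));
      }
    }

As in the real method, the last failure is only surfaced after every attempt has been used.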

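checkAndMarkRunningHbck() in the same hunk wraps that file creation in a FileLockCallable and
bounds the wait with futureTask.get(timeout); the default of 80 seconds
(hbase.hbck.lockfile.maxwaittime) sits above the 60-second HDFS lease soft limit so the create
call can ride out the NameNode's own retry on AlreadyBeingCreatedException before hbck gives up.
A rough sketch of that bounded-wait pattern, with a hypothetical Callable and class name standing
in for the lock-file task:

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.FutureTask;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public class BoundedWaitSketch {
      /** Run the task on a single worker thread; give up (and cancel it) after timeoutSeconds. */
      static <T> T runWithTimeout(Callable<T> task, int timeoutSeconds) {
        ExecutorService executor = Executors.newFixedThreadPool(1);  // one worker, as hbck uses
        FutureTask<T> futureTask = new FutureTask<>(task);
        executor.execute(futureTask);
        try {
          return futureTask.get(timeoutSeconds, TimeUnit.SECONDS);
        } catch (ExecutionException ee) {
          System.out.println("Task failed: " + ee.getCause());
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();   // preserve the interrupt, as the real code does
        } catch (TimeoutException te) {
          futureTask.cancel(true);              // waited too long; interrupt the worker
        } finally {
          executor.shutdownNow();
        }
        return null;                            // mirrors hbck returning a null stream on failure
      }

      public static void main(String[] args) {
        // Hypothetical slow task standing in for the lock-file creation callable.
        String result = runWithTimeout(() -> { Thread.sleep(100); return "lock acquired"; }, 80);
        System.out.println(result);
      }
    }
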
[11/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(g

<TRUNCATED>
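The class comment in the source above describes how hbck must connect to a live cluster before running its region-consistency checks. The sketch below is a minimal illustration of that read-only flow: the HBaseFsck(Configuration) constructor, connect(), and the Closeable close() are visible in the source above, while onlineHbck() as the entry point for a full check pass is an assumption and may differ in this snapshot.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.HBaseFsck;

public class HbckReadOnlyCheck {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    int ret;
    // Constructor shown above; it builds its own daemon thread pool.
    try (HBaseFsck fsck = new HBaseFsck(conf)) {
      // connect() must succeed before any region-consistency checks (see class comment).
      fsck.connect();
      // Assumed entry point for the full check pass; against hbase-2.x this reads
      // state only and does not repair.
      ret = fsck.onlineHbck();
    }
    System.exit(ret);
  }
}

Command-line use is equivalent; the unsupportedOptionsInV2 set in the source above lists the repair flags that are rejected when running against hbase-2.x.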

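The lock-file retry behaviour shown in the source above is driven by plain configuration keys, so it can be tuned before the tool is constructed. A small sketch, restating only the keys and defaults that appear in createLockRetryCounterFactory() and checkAndMarkRunningHbck():

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HbckLockTuning {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Keys and defaults as listed in the source above.
    conf.setInt("hbase.hbck.lockfile.attempts", 5);                  // DEFAULT_MAX_LOCK_FILE_ATTEMPTS
    conf.setInt("hbase.hbck.lockfile.attempt.sleep.interval", 200);  // milliseconds
    conf.setInt("hbase.hbck.lockfile.attempt.maxsleeptime", 5000);   // milliseconds
    // Must stay above the HDFS lease soft limit (60 seconds), hence the 80-second default.
    conf.setInt("hbase.hbck.lockfile.maxwaittime", 80);              // seconds
  }
}

The znode-creation retries follow the same pattern via the hbase.hbck.createznode.* keys.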
[41/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
index 5572799..1b532de 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4175">HBaseFsck.ErrorReporter.ERROR_CODE</a>
+<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4169">HBaseFsck.ErrorReporter.ERROR_CODE</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;</pre>
 </li>
 </ul>
@@ -315,7 +315,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>UNKNOWN</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">UNKNOWN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">UNKNOWN</a></pre>
 </li>
 </ul>
 <a name="NO_META_REGION">
@@ -324,7 +324,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_META_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">NO_META_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NO_META_REGION</a></pre>
 </li>
 </ul>
 <a name="NULL_META_REGION">
@@ -333,7 +333,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NULL_META_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">NULL_META_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NULL_META_REGION</a></pre>
 </li>
 </ul>
 <a name="NO_VERSION_FILE">
@@ -342,7 +342,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_VERSION_FILE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">NO_VERSION_FILE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NO_VERSION_FILE</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_META_HDFS">
@@ -351,7 +351,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_META_HDFS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">NOT_IN_META_HDFS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_META_HDFS</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_META">
@@ -360,7 +360,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_META</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">NOT_IN_META</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_META</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_META_OR_DEPLOYED">
@@ -369,7 +369,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_META_OR_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">NOT_IN_META_OR_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_IN_META_OR_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_HDFS_OR_DEPLOYED">
@@ -378,7 +378,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_HDFS_OR_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">NOT_IN_HDFS_OR_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_IN_HDFS_OR_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_HDFS">
@@ -387,7 +387,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_HDFS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">NOT_IN_HDFS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_IN_HDFS</a></pre>
 </li>
 </ul>
 <a name="SERVER_DOES_NOT_MATCH_META">
@@ -396,7 +396,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_DOES_NOT_MATCH_META</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">SERVER_DOES_NOT_MATCH_META</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">SERVER_DOES_NOT_MATCH_META</a></pre>
 </li>
 </ul>
 <a name="NOT_DEPLOYED">
@@ -405,7 +405,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">NOT_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">NOT_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="MULTI_DEPLOYED">
@@ -414,7 +414,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>MULTI_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">MULTI_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">MULTI_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="SHOULD_NOT_BE_DEPLOYED">
@@ -423,7 +423,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SHOULD_NOT_BE_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">SHOULD_NOT_BE_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">SHOULD_NOT_BE_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="MULTI_META_REGION">
@@ -432,7 +432,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>MULTI_META_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">MULTI_META_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">MULTI_META_REGION</a></pre>
 </li>
 </ul>
 <a name="RS_CONNECT_FAILURE">
@@ -441,7 +441,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>RS_CONNECT_FAILURE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">RS_CONNECT_FAILURE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">RS_CONNECT_FAILURE</a></pre>
 </li>
 </ul>
 <a name="FIRST_REGION_STARTKEY_NOT_EMPTY">
@@ -450,7 +450,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>FIRST_REGION_STARTKEY_NOT_EMPTY</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4179">FIRST_REGION_STARTKEY_NOT_EMPTY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">FIRST_REGION_STARTKEY_NOT_EMPTY</a></pre>
 </li>
 </ul>
 <a name="LAST_REGION_ENDKEY_NOT_EMPTY">
@@ -459,7 +459,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LAST_REGION_ENDKEY_NOT_EMPTY</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4179">LAST_REGION_ENDKEY_NOT_EMPTY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">LAST_REGION_ENDKEY_NOT_EMPTY</a></pre>
 </li>
 </ul>
 <a name="DUPE_STARTKEYS">
@@ -468,7 +468,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DUPE_STARTKEYS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4179">DUPE_STARTKEYS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">DUPE_STARTKEYS</a></pre>
 </li>
 </ul>
 <a name="HOLE_IN_REGION_CHAIN">
@@ -477,7 +477,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>HOLE_IN_REGION_CHAIN</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4180">HOLE_IN_REGION_CHAIN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">HOLE_IN_REGION_CHAIN</a></pre>
 </li>
 </ul>
 <a name="OVERLAP_IN_REGION_CHAIN">
@@ -486,7 +486,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>OVERLAP_IN_REGION_CHAIN</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4180">OVERLAP_IN_REGION_CHAIN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">OVERLAP_IN_REGION_CHAIN</a></pre>
 </li>
 </ul>
 <a name="REGION_CYCLE">
@@ -495,7 +495,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_CYCLE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4180">REGION_CYCLE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">REGION_CYCLE</a></pre>
 </li>
 </ul>
 <a name="DEGENERATE_REGION">
@@ -504,7 +504,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DEGENERATE_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4180">DEGENERATE_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">DEGENERATE_REGION</a></pre>
 </li>
 </ul>
 <a name="ORPHAN_HDFS_REGION">
@@ -513,7 +513,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>ORPHAN_HDFS_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4181">ORPHAN_HDFS_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">ORPHAN_HDFS_REGION</a></pre>
 </li>
 </ul>
 <a name="LINGERING_SPLIT_PARENT">
@@ -522,7 +522,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LINGERING_SPLIT_PARENT</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4181">LINGERING_SPLIT_PARENT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">LINGERING_SPLIT_PARENT</a></pre>
 </li>
 </ul>
 <a name="NO_TABLEINFO_FILE">
@@ -531,7 +531,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_TABLEINFO_FILE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4181">NO_TABLEINFO_FILE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">NO_TABLEINFO_FILE</a></pre>
 </li>
 </ul>
 <a name="LINGERING_REFERENCE_HFILE">
@@ -540,7 +540,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LINGERING_REFERENCE_HFILE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4181">LINGERING_REFERENCE_HFILE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">LINGERING_REFERENCE_HFILE</a></pre>
 </li>
 </ul>
 <a name="LINGERING_HFILELINK">
@@ -549,7 +549,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LINGERING_HFILELINK</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4182">LINGERING_HFILELINK</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">LINGERING_HFILELINK</a></pre>
 </li>
 </ul>
 <a name="WRONG_USAGE">
@@ -558,7 +558,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>WRONG_USAGE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4182">WRONG_USAGE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">WRONG_USAGE</a></pre>
 </li>
 </ul>
 <a name="EMPTY_META_CELL">
@@ -567,7 +567,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>EMPTY_META_CELL</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4182">EMPTY_META_CELL</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">EMPTY_META_CELL</a></pre>
 </li>
 </ul>
 <a name="EXPIRED_TABLE_LOCK">
@@ -576,7 +576,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>EXPIRED_TABLE_LOCK</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4182">EXPIRED_TABLE_LOCK</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">EXPIRED_TABLE_LOCK</a></pre>
 </li>
 </ul>
 <a name="BOUNDARIES_ERROR">
@@ -585,7 +585,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>BOUNDARIES_ERROR</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4182">BOUNDARIES_ERROR</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">BOUNDARIES_ERROR</a></pre>
 </li>
 </ul>
 <a name="ORPHAN_TABLE_STATE">
@@ -594,7 +594,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>ORPHAN_TABLE_STATE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4183">ORPHAN_TABLE_STATE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">ORPHAN_TABLE_STATE</a></pre>
 </li>
 </ul>
 <a name="NO_TABLE_STATE">
@@ -603,7 +603,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_TABLE_STATE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4183">NO_TABLE_STATE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">NO_TABLE_STATE</a></pre>
 </li>
 </ul>
 <a name="UNDELETED_REPLICATION_QUEUE">
@@ -612,7 +612,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>UNDELETED_REPLICATION_QUEUE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4183">UNDELETED_REPLICATION_QUEUE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">UNDELETED_REPLICATION_QUEUE</a></pre>
 </li>
 </ul>
 <a name="DUPE_ENDKEYS">
@@ -621,7 +621,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DUPE_ENDKEYS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4183">DUPE_ENDKEYS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">DUPE_ENDKEYS</a></pre>
 </li>
 </ul>
 <a name="UNSUPPORTED_OPTION">
@@ -630,7 +630,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>UNSUPPORTED_OPTION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4184">UNSUPPORTED_OPTION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">UNSUPPORTED_OPTION</a></pre>
 </li>
 </ul>
 <a name="INVALID_TABLE">
@@ -639,7 +639,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>INVALID_TABLE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4184">INVALID_TABLE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">INVALID_TABLE</a></pre>
 </li>
 </ul>
 </li>
@@ -656,7 +656,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3899">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3893">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -676,7 +676,7 @@ for (HBaseFsck.ErrorReporter.ERROR_CODE c : HBaseFsck.ErrorReporter.ERROR_CODE.v
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3899">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3893">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
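
The Javadoc context above restates the standard enum contract for values() and
valueOf(String). A minimal Java sketch of that usage, assuming nothing beyond the
ERROR_CODE constants listed in this diff:

    // iterate the constants in declaration order
    for (HBaseFsck.ErrorReporter.ERROR_CODE c : HBaseFsck.ErrorReporter.ERROR_CODE.values()) {
      System.out.println(c.name());
    }
    // valueOf() needs an exact name match and otherwise throws IllegalArgumentException
    HBaseFsck.ErrorReporter.ERROR_CODE hole =
        HBaseFsck.ErrorReporter.ERROR_CODE.valueOf("HOLE_IN_REGION_CHAIN");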

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index ec6a588..2c08d61 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4174">HBaseFsck.ErrorReporter</a></pre>
+<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4168">HBaseFsck.ErrorReporter</a></pre>
 </li>
 </ul>
 </div>
@@ -234,7 +234,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>clear</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4186">clear</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4180">clear</a>()</pre>
 </li>
 </ul>
 <a name="report-java.lang.String-">
@@ -243,7 +243,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>report</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4187">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4181">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
 <a name="reportError-java.lang.String-">
@@ -252,7 +252,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4188">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4182">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
 <a name="reportError-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE-java.lang.String-">
@@ -261,7 +261,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4189">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4183">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
@@ -271,7 +271,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4190">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4184">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 </li>
@@ -282,7 +282,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4191">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4185">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info)</pre>
@@ -294,7 +294,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4192">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4186">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info1,
@@ -307,7 +307,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>summarize</h4>
-<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4199">summarize</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4193">summarize</a>()</pre>
 </li>
 </ul>
 <a name="detail-java.lang.String-">
@@ -316,7 +316,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>detail</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4200">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;details)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4194">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;details)</pre>
 </li>
 </ul>
 <a name="getErrorList--">
@@ -325,7 +325,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>getErrorList</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4201">getErrorList</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4195">getErrorList</a>()</pre>
 </li>
 </ul>
 <a name="progress--">
@@ -334,7 +334,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>progress</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4202">progress</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4196">progress</a>()</pre>
 </li>
 </ul>
 <a name="print-java.lang.String-">
@@ -343,7 +343,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>print</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4203">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4197">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
 <a name="resetErrors--">
@@ -352,7 +352,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>resetErrors</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4204">resetErrors</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4198">resetErrors</a>()</pre>
 </li>
 </ul>
 <a name="tableHasErrors-org.apache.hadoop.hbase.util.HBaseFsck.TableInfo-">
@@ -361,7 +361,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>tableHasErrors</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4205">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4199">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 </li>
 </ul>
 </li>
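
The reportError overloads and summarize() diffed above are the whole surface a
consistency check needs in order to flag a problem. A hedged sketch, assuming the
ErrorReporter and TableInfo instances come from the surrounding hbck run (the
message string is illustrative, not taken from this patch):

    static int reportHole(HBaseFsck.ErrorReporter errors, HBaseFsck.TableInfo table) {
      errors.reportError(HBaseFsck.ErrorReporter.ERROR_CODE.HOLE_IN_REGION_CHAIN,
          "Hole detected between adjacent regions", table);
      return errors.summarize();  // declared above to return an int
    }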

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
index d104a0a..088a55c 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.418">HBaseFsck.FileLockCallable</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.417">HBaseFsck.FileLockCallable</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;org.apache.hadoop.fs.FSDataOutputStream&gt;</pre>
 </li>
@@ -225,7 +225,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>retryCounter</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/util/RetryCounter.html" title="class in org.apache.hadoop.hbase.util">RetryCounter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.419">retryCounter</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/util/RetryCounter.html" title="class in org.apache.hadoop.hbase.util">RetryCounter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.418">retryCounter</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -234,7 +234,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.420">conf</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.419">conf</a></pre>
 </li>
 </ul>
 <a name="hbckLockPath">
@@ -243,7 +243,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hbckLockPath</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.421">hbckLockPath</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.420">hbckLockPath</a></pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileLockCallable</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.423">FileLockCallable</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.422">FileLockCallable</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                         <a href="../../../../../org/apache/hadoop/hbase/util/RetryCounter.html" title="class in org.apache.hadoop.hbase.util">RetryCounter</a>&nbsp;retryCounter)</pre>
 </li>
 </ul>
@@ -278,7 +278,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>getHbckLockPath</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.431">getHbckLockPath</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.430">getHbckLockPath</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Will be <code>null</code> unless you call <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#call--"><code>call()</code></a></dd>
@@ -291,7 +291,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.436">call</a>()
+<pre>public&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.435">call</a>()
                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -307,7 +307,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createFileWithRetries</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.461">createFileWithRetries</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private&nbsp;org.apache.hadoop.fs.FSDataOutputStream&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html#line.460">createFileWithRetries</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                                       org.apache.hadoop.fs.Path&nbsp;hbckLockFilePath,
                                                                       org.apache.hadoop.fs.permission.FsPermission&nbsp;defaultPerms)
                                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
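
FileLockCallable pairs lock-file creation with a RetryCounter. The loop body is not
part of this hunk, so the following is only an assumed shape of a create-with-retries
helper; it presumes RetryCounter's shouldRetry()/sleepUntilNextRetry() helpers and
FileSystem.create(Path, boolean), with types from org.apache.hadoop.fs and
org.apache.hadoop.hbase.util:

    static FSDataOutputStream createWithRetries(FileSystem fs, Path lockPath,
        RetryCounter retries) throws IOException, InterruptedException {
      while (true) {
        try {
          return fs.create(lockPath, false);  // overwrite=false: fail if the lock already exists
        } catch (IOException ioe) {
          if (!retries.shouldRetry()) {
            throw ioe;                        // attempts exhausted, surface the last failure
          }
          retries.sleepUntilNextRetry();      // back off before the next attempt
        }
      }
    }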

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
index ae195fe..5a17750 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4942">HBaseFsck.HBaseFsckTool</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4936">HBaseFsck.HBaseFsckTool</a>
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool</pre>
 <div class="block">This is a Tool wrapper that gathers -Dxxx=yyy configuration settings from the command line.</div>
@@ -207,7 +207,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HBaseFsckTool</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4943">HBaseFsckTool</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4937">HBaseFsckTool</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 </li>
@@ -224,7 +224,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4945">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4939">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
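
HBaseFsckTool is documented above as a Tool wrapper that gathers -Dxxx=yyy settings
from the command line. A sketch of how such a wrapper is typically driven through
ToolRunner (assumed wiring, not shown in this hunk; the class is package-private, so
this only compiles from within org.apache.hadoop.hbase.util):

    Configuration conf = HBaseConfiguration.create();
    int exitCode = ToolRunner.run(conf, new HBaseFsck.HBaseFsckTool(conf), args);
    System.exit(exitCode);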

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index 7b862f1..8dffb7c 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3913">HBaseFsck.HbckInfo</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3907">HBaseFsck.HbckInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" title="interface in org.apache.hadoop.hbase.util">KeyRange</a></pre>
 <div class="block">Maintain information about a particular region.  It gathers information
@@ -305,7 +305,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>metaEntry</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3914">metaEntry</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3908">metaEntry</a></pre>
 </li>
 </ul>
 <a name="hdfsEntry">
@@ -314,7 +314,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsEntry</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HdfsEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3915">hdfsEntry</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HdfsEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3909">hdfsEntry</a></pre>
 </li>
 </ul>
 <a name="deployedEntries">
@@ -323,7 +323,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedEntries</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.OnlineEntry</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3916">deployedEntries</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.OnlineEntry</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3910">deployedEntries</a></pre>
 </li>
 </ul>
 <a name="deployedOn">
@@ -332,7 +332,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedOn</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3917">deployedOn</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3911">deployedOn</a></pre>
 </li>
 </ul>
 <a name="skipChecks">
@@ -341,7 +341,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>skipChecks</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3918">skipChecks</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3912">skipChecks</a></pre>
 </li>
 </ul>
 <a name="isMerged">
@@ -350,7 +350,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>isMerged</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3919">isMerged</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3913">isMerged</a></pre>
 </li>
 </ul>
 <a name="deployedReplicaId">
@@ -359,7 +359,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedReplicaId</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3920">deployedReplicaId</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3914">deployedReplicaId</a></pre>
 </li>
 </ul>
 <a name="primaryHRIForDeployedReplica">
@@ -368,7 +368,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockListLast">
 <li class="blockList">
 <h4>primaryHRIForDeployedReplica</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3921">primaryHRIForDeployedReplica</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3915">primaryHRIForDeployedReplica</a></pre>
 </li>
 </ul>
 </li>
@@ -385,7 +385,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HbckInfo</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3923">HbckInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a>&nbsp;metaEntry)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3917">HbckInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a>&nbsp;metaEntry)</pre>
 </li>
 </ul>
 </li>
@@ -402,7 +402,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicaId</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3927">getReplicaId</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3921">getReplicaId</a>()</pre>
 </li>
 </ul>
 <a name="addServer-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.hbase.ServerName-">
@@ -411,7 +411,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>addServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3931">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3925">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri,
                       <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server)</pre>
 </li>
 </ul>
@@ -421,7 +421,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3944">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3938">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -434,7 +434,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getStartKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3956">getStartKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3950">getStartKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html#getStartKey--">getStartKey</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" title="interface in org.apache.hadoop.hbase.util">KeyRange</a></code></dd>
@@ -447,7 +447,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getEndKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3968">getEndKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3962">getEndKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html#getEndKey--">getEndKey</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" title="interface in org.apache.hadoop.hbase.util">KeyRange</a></code></dd>
@@ -460,7 +460,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3979">getTableName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3973">getTableName</a>()</pre>
 </li>
 </ul>
 <a name="getRegionNameAsString--">
@@ -469,7 +469,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionNameAsString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3996">getRegionNameAsString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3990">getRegionNameAsString</a>()</pre>
 </li>
 </ul>
 <a name="getRegionName--">
@@ -478,7 +478,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionName</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4012">getRegionName</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4006">getRegionName</a>()</pre>
 </li>
 </ul>
 <a name="getPrimaryHRIForDeployedReplica--">
@@ -487,7 +487,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getPrimaryHRIForDeployedReplica</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4026">getPrimaryHRIForDeployedReplica</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4020">getPrimaryHRIForDeployedReplica</a>()</pre>
 </li>
 </ul>
 <a name="getHdfsRegionDir--">
@@ -496,7 +496,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getHdfsRegionDir</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4030">getHdfsRegionDir</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4024">getHdfsRegionDir</a>()</pre>
 </li>
 </ul>
 <a name="containsOnlyHdfsEdits--">
@@ -505,7 +505,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>containsOnlyHdfsEdits</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4037">containsOnlyHdfsEdits</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4031">containsOnlyHdfsEdits</a>()</pre>
 </li>
 </ul>
 <a name="isHdfsRegioninfoPresent--">
@@ -514,7 +514,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>isHdfsRegioninfoPresent</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4044">isHdfsRegioninfoPresent</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4038">isHdfsRegioninfoPresent</a>()</pre>
 </li>
 </ul>
 <a name="getModTime--">
@@ -523,7 +523,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getModTime</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4051">getModTime</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4045">getModTime</a>()</pre>
 </li>
 </ul>
 <a name="getHdfsHRI--">
@@ -532,7 +532,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getHdfsHRI</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4058">getHdfsHRI</a>()</pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4052">getHdfsHRI</a>()</pre>
 </li>
 </ul>
 <a name="setSkipChecks-boolean-">
@@ -541,7 +541,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>setSkipChecks</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4065">setSkipChecks</a>(boolean&nbsp;skipChecks)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4059">setSkipChecks</a>(boolean&nbsp;skipChecks)</pre>
 </li>
 </ul>
 <a name="isSkipChecks--">
@@ -550,7 +550,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>isSkipChecks</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4069">isSkipChecks</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4063">isSkipChecks</a>()</pre>
 </li>
 </ul>
 <a name="setMerged-boolean-">
@@ -559,7 +559,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>setMerged</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4073">setMerged</a>(boolean&nbsp;isMerged)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4067">setMerged</a>(boolean&nbsp;isMerged)</pre>
 </li>
 </ul>
 <a name="isMerged--">
@@ -568,7 +568,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isMerged</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4077">isMerged</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4071">isMerged</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
index 4c74fed..11c97b8 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
@@ -107,7 +107,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3888">HBaseFsck.HdfsEntry</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3882">HBaseFsck.HdfsEntry</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Stores the regioninfo entries from HDFS</div>
 </li>
@@ -201,7 +201,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hri</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3889">hri</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3883">hri</a></pre>
 </li>
 </ul>
 <a name="hdfsRegionDir">
@@ -210,7 +210,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsRegionDir</h4>
-<pre>org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3890">hdfsRegionDir</a></pre>
+<pre>org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3884">hdfsRegionDir</a></pre>
 </li>
 </ul>
 <a name="hdfsRegionDirModTime">
@@ -219,7 +219,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsRegionDirModTime</h4>
-<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3891">hdfsRegionDirModTime</a></pre>
+<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3885">hdfsRegionDirModTime</a></pre>
 </li>
 </ul>
 <a name="hdfsRegioninfoFilePresent">
@@ -228,7 +228,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsRegioninfoFilePresent</h4>
-<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3892">hdfsRegioninfoFilePresent</a></pre>
+<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3886">hdfsRegioninfoFilePresent</a></pre>
 </li>
 </ul>
 <a name="hdfsOnlyEdits">
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hdfsOnlyEdits</h4>
-<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3893">hdfsOnlyEdits</a></pre>
+<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3887">hdfsOnlyEdits</a></pre>
 </li>
 </ul>
 </li>
@@ -254,7 +254,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HdfsEntry</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3888">HdfsEntry</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3882">HdfsEntry</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
index 44679a4..8d6a0bc 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3837">HBaseFsck.MetaEntry</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3831">HBaseFsck.MetaEntry</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></pre>
 <div class="block">Stores the regioninfo entries scanned from META</div>
 </li>
@@ -264,7 +264,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>regionServer</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3838">regionServer</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3832">regionServer</a></pre>
 </li>
 </ul>
 <a name="modTime">
@@ -273,7 +273,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>modTime</h4>
-<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3839">modTime</a></pre>
+<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3833">modTime</a></pre>
 </li>
 </ul>
 <a name="splitA">
@@ -282,7 +282,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>splitA</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3840">splitA</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3834">splitA</a></pre>
 </li>
 </ul>
 <a name="splitB">
@@ -291,7 +291,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockListLast">
 <li class="blockList">
 <h4>splitB</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3840">splitB</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3834">splitB</a></pre>
 </li>
 </ul>
 </li>
@@ -308,7 +308,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>MetaEntry</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3842">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3836">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
                  <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;regionServer,
                  long&nbsp;modTime)</pre>
 </li>
@@ -319,7 +319,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MetaEntry</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3846">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3840">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
                  <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;regionServer,
                  long&nbsp;modTime,
                  <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;splitA,
@@ -340,7 +340,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3856">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3850">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html#equals-java.lang.Object-">equals</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></code></dd>
@@ -355,7 +355,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3870">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3864">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html#hashCode--">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></code></dd>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a

<TRUNCATED>
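
The HBaseFsck listing above shows the hbck1 lock-file protocol: checkAndMarkRunningHbck() writes hbase-hbck.lock under the HBase temp directory (retrying per the hbase.hbck.lockfile.* settings), and unlockHbck() later closes the stream and deletes the file. The following minimal Java sketch, which is not part of the published source, shows how a caller could drive that public API; it assumes the signatures exactly as listed (checkAndMarkRunningHbck, createLockRetryCounterFactory, FSUtils, Pair) and elides error handling.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.hbase.util.Pair;

public class HbckLockSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Acquire the exclusive hbase-hbck.lock under the HBase temp directory,
    // retrying per the hbase.hbck.lockfile.* settings shown in the listing.
    Pair<Path, FSDataOutputStream> lock = HBaseFsck.checkAndMarkRunningHbck(
        conf, HBaseFsck.createLockRetryCounterFactory(conf).create());
    if (lock.getSecond() == null) {
      // Another hbck instance (or an hbase-2.x Master) already holds the lock file.
      System.err.println("Lock held at " + lock.getFirst() + "; aborting.");
      return;
    }
    try {
      // ... read-only consistency checks would run here ...
    } finally {
      // Mirror unlockHbck(): close the stream, then delete the lock file.
      lock.getSecond().close();
      FSUtils.delete(FSUtils.getCurrentFileSystem(conf), lock.getFirst(), true);
    }
  }
}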

[43/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 8d1bfab..cd14215 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":9,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":9,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 ":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":9,"i132":10,"i133":9,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":9,"i153":10,"i154":9,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":9,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":9,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 ":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":9,"i131":10,"i132":9,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":9,"i152":10,"i153":9,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -351,82 +351,78 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#MASTERLESS_CONFIG_NAME">MASTERLESS_CONFIG_NAME</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#metaTableLocator">metaTableLocator</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#metricsRegionServer">metricsRegionServer</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsTable.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsTable</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#metricsTable">metricsTable</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/mob/MobCacheConfig.html" title="class in org.apache.hadoop.hbase.mob">MobCacheConfig</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#mobCacheConfig">mobCacheConfig</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionInfo</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegions">movedRegions</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionsCleaner</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegionsCleaner">movedRegionsCleaner</a></span></code>
 <div class="block">Chore to clean periodically the moved region list</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#msgInterval">msgInterval</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/ServerNonceManager.html" title="class in org.apache.hadoop.hbase.regionserver">ServerNonceManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#nonceManager">nonceManager</a></span></code>
 <div class="block">Nonce manager.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#nonceManagerChore">nonceManagerChore</a></span></code>
 <div class="block">The nonce manager chore.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#numRegionsToReport">numRegionsToReport</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#numRetries">numRetries</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicBoolean</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#online">online</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onlineRegions">onlineRegions</a></span></code>
 <div class="block">Map of regions currently being served by this region server.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#operationTimeout">operationTimeout</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/util/JvmPauseMonitor.html" title="class in org.apache.hadoop.hbase.util">JvmPauseMonitor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#pauseMonitor">pauseMonitor</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#periodicFlusher">periodicFlusher</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/RemoteProcedureResultReporter.html" title="class in org.apache.hadoop.hbase.regionserver">RemoteProcedureResultReporter</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#procedureResultReporter">procedureResultReporter</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[]&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionFavoredNodesMap">regionFavoredNodesMap</a></span></code>
 <div class="block">Map of encoded region names to the DataNode locations they should be hosted on
@@ -434,177 +430,177 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
  API (create() that takes favored nodes as hints for placing file blocks).</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#REGIONSERVER">REGIONSERVER</a></span></code>
 <div class="block">region server process name</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerAccounting</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionServerAccounting">regionServerAccounting</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionsInTransitionInRS">regionsInTransitionInRS</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSinkService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSinkService</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSinkHandler">replicationSinkHandler</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSourceService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSourceHandler">replicationSourceHandler</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rootDir">rootDir</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rowlocks">rowlocks</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/ipc/RpcClient.html" title="interface in org.apache.hadoop.hbase.ipc">RpcClient</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcClient">rpcClient</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ipc/RpcControllerFactory.html" title="class in org.apache.hadoop.hbase.ipc">RpcControllerFactory</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcControllerFactory">rpcControllerFactory</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.html" title="class in org.apache.hadoop.hbase.client">RpcRetryingCallerFactory</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcRetryingCallerFactory">rpcRetryingCallerFactory</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcServices">rpcServices</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY">RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#RS_HOSTNAME_KEY">RS_HOSTNAME_KEY</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rsHost">rsHost</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.html" title="class in org.apache.hadoop.hbase.procedure">RegionServerProcedureManagerHost</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rspmHost">rspmHost</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html" title="class in org.apache.hadoop.hbase.quotas">RegionServerRpcQuotaManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rsQuotaManager">rsQuotaManager</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.html" title="class in org.apache.hadoop.hbase.quotas">RegionServerSpaceQuotaManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rsSpaceQuotaManager">rsSpaceQuotaManager</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rssStub">rssStub</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.html" title="class in org.apache.hadoop.hbase.regionserver">SecureBulkLoadManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#secureBulkLoadManager">secureBulkLoadManager</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#serverName">serverName</a></span></code>
 <div class="block">The server name the Master sees us as.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#shortOperationTimeout">shortOperationTimeout</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#shutDown">shutDown</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/util/Sleeper.html" title="class in org.apache.hadoop.hbase.util">Sleeper</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sleeper">sleeper</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/trace/SpanReceiverHost.html" title="class in org.apache.hadoop.hbase.trace">SpanReceiverHost</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#spanReceiverHost">spanReceiverHost</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.html" title="class in org.apache.hadoop.hbase.regionserver">SplitLogWorker</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#splitLogWorker">splitLogWorker</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#startcode">startcode</a></span></code>
 <div class="block">This servers startcode.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stopped">stopped</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stopping">stopping</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.html" title="class in org.apache.hadoop.hbase.regionserver">StorefileRefresherChore</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#storefileRefresher">storefileRefresher</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tableDescriptors">tableDescriptors</a></span></code>
 <div class="block">Go here to get table descriptors.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#TEST_SKIP_REPORTING_TRANSITION">TEST_SKIP_REPORTING_TRANSITION</a></span></code>
 <div class="block">For testing only!  Set to true to skip notifying region assignment to master .</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#threadWakeFrequency">threadWakeFrequency</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#TIMEOUT_REGION_MOVED">TIMEOUT_REGION_MOVED</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.UncaughtExceptionHandler.html?is-external=true" title="class or interface in java.lang">Thread.UncaughtExceptionHandler</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#uncaughtExceptionHandler">uncaughtExceptionHandler</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#userProvider">userProvider</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#useThisHostnameInstead">useThisHostnameInstead</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFactory">walFactory</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFs">walFs</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/LogRoller.html" title="class in org.apache.hadoop.hbase.regionserver">LogRoller</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRoller">walRoller</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRootDir">walRootDir</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#zooKeeper">zooKeeper</a></span></code>&nbsp;</td>
 </tr>
@@ -989,307 +985,300 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMasterAddressTracker--">getMasterAddressTracker</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i67" class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableLocator--">getMetaTableLocator</a></span>()</code>
-<div class="block">Returns instance of <a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server.</div>
-</td>
-</tr>
-<tr id="i68" class="altColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder</a>,<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableObserver--">getMetaTableObserver</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i69" class="rowColor">
+<tr id="i68" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetrics--">getMetrics</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i70" class="altColor">
+<tr id="i69" class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMostLoadedRegions--">getMostLoadedRegions</a></span>()</code>
 <div class="block">Get the top N most loaded regions this server is serving so we can tell the
  master which regions it can reallocate if we're overloaded.</div>
 </td>
 </tr>
-<tr id="i71" class="rowColor">
+<tr id="i70" class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionInfo</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMovedRegion-java.lang.String-">getMovedRegion</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)</code>&nbsp;</td>
 </tr>
-<tr id="i72" class="altColor">
+<tr id="i71" class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMyEphemeralNodePath--">getMyEphemeralNodePath</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i73" class="rowColor">
+<tr id="i72" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/ServerNonceManager.html" title="class in org.apache.hadoop.hbase.regionserver">ServerNonceManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNonceManager--">getNonceManager</a></span>()</code>
 <div class="block">Only required for "old" log replay; if it's removed, remove this.</div>
 </td>
 </tr>
-<tr id="i74" class="altColor">
+<tr id="i73" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNumberOfOnlineRegions--">getNumberOfOnlineRegions</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i75" class="rowColor">
+<tr id="i74" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegion-byte:A-">getOnlineRegion</a></span>(byte[]&nbsp;regionName)</code>&nbsp;</td>
 </tr>
-<tr id="i76" class="altColor">
+<tr id="i75" class="rowColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegionsAsPrintableString--">getOnlineRegionsAsPrintableString</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i77" class="rowColor">
+<tr id="i76" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegionsLocalContext--">getOnlineRegionsLocalContext</a></span>()</code>
 <div class="block">For tests, web ui and metrics.</div>
 </td>
 </tr>
-<tr id="i78" class="altColor">
+<tr id="i77" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineTables--">getOnlineTables</a></span>()</code>
 <div class="block">Gets the online tables in this RS.</div>
 </td>
 </tr>
-<tr id="i79" class="rowColor">
+<tr id="i78" class="altColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getProcessName--">getProcessName</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i80" class="altColor">
+<tr id="i79" class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-byte:A-">getRegion</a></span>(byte[]&nbsp;regionName)</code>
 <div class="block">Protected Utility method for safely obtaining an HRegion handle.</div>
 </td>
 </tr>
-<tr id="i81" class="rowColor">
+<tr id="i80" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-java.lang.String-">getRegion</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)</code>
 <div class="block">Return <a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver"><code>Region</code></a> instance.</div>
 </td>
 </tr>
-<tr id="i82" class="altColor">
+<tr id="i81" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true" title="class or interface in java.net">InetSocketAddress</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionBlockLocations-java.lang.String-">getRegionBlockLocations</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)</code>&nbsp;</td>
 </tr>
-<tr id="i83" class="rowColor">
+<tr id="i82" class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-byte:A-java.lang.String-">getRegionByEncodedName</a></span>(byte[]&nbsp;regionName,
                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)</code>&nbsp;</td>
 </tr>
-<tr id="i84" class="altColor">
+<tr id="i83" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-java.lang.String-">getRegionByEncodedName</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)</code>&nbsp;</td>
 </tr>
-<tr id="i85" class="rowColor">
+<tr id="i84" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions--">getRegions</a></span>()</code>
 <div class="block">Get all online regions in this RS.</div>
 </td>
 </tr>
-<tr id="i86" class="altColor">
+<tr id="i85" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions-org.apache.hadoop.hbase.TableName-">getRegions</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>
 <div class="block">Gets the online regions of the specified table.</div>
 </td>
 </tr>
-<tr id="i87" class="rowColor">
+<tr id="i86" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerAccounting</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerAccounting--">getRegionServerAccounting</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i88" class="altColor">
+<tr id="i87" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerCoprocessorHost</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessorHost--">getRegionServerCoprocessorHost</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i89" class="rowColor">
+<tr id="i88" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessors--">getRegionServerCoprocessors</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i90" class="altColor">
+<tr id="i89" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerMetrics--">getRegionServerMetrics</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i91" class="rowColor">
+<tr id="i90" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.html" title="class in org.apache.hadoop.hbase.quotas">RegionServerRpcQuotaManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerRpcQuotaManager--">getRegionServerRpcQuotaManager</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i92" class="altColor">
+<tr id="i91" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.html" title="class in org.apache.hadoop.hbase.quotas">RegionServerSpaceQuotaManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerSpaceQuotaManager--">getRegionServerSpaceQuotaManager</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i93" class="rowColor">
+<tr id="i92" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true" title="class or interface in java.lang">Boolean</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionsInTransitionInRS--">getRegionsInTransitionInRS</a></span>()</code>
 <div class="block">Get the regions that are currently being opened or closed in the RS</div>
 </td>
 </tr>
-<tr id="i94" class="altColor">
+<tr id="i93" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSinkService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSinkService</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSinkService--">getReplicationSinkService</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i95" class="rowColor">
+<tr id="i94" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSourceService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSourceService--">getReplicationSourceService</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i96" class="altColor">
+<tr id="i95" class="rowColor">
 <td class="colFirst"><code>protected org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRootDir--">getRootDir</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i97" class="rowColor">
+<tr id="i96" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServerInterface.html" title="interface in org.apache.hadoop.hbase.ipc">RpcServerInterface</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRpcServer--">getRpcServer</a></span>()</code>
 <div class="block">Returns a reference to the region server's RPC server</div>
 </td>
 </tr>
-<tr id="i98" class="altColor">
+<tr id="i97" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRSRpcServices--">getRSRpcServices</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i99" class="rowColor">
+<tr id="i98" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.html" title="class in org.apache.hadoop.hbase.regionserver">SecureBulkLoadManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getSecureBulkLoadManager--">getSecureBulkLoadManager</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i100" class="altColor">
+<tr id="i99" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getServerName--">getServerName</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i101" class="rowColor">
+<tr id="i100" class="altColor">
 <td class="colFirst"><code>long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getStartcode--">getStartcode</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i102" class="altColor">
+<tr id="i101" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getTableDescriptors--">getTableDescriptors</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i103" class="rowColor">
+<tr id="i102" class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getThreadWakeFrequency--">getThreadWakeFrequency</a></span>()</code>
 <div class="block">Interval at which threads should run</div>
 </td>
 </tr>
-<tr id="i104" class="altColor">
+<tr id="i103" class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getUseThisHostnameInstead-org.apache.hadoop.conf.Configuration-">getUseThisHostnameInstead</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
-<tr id="i105" class="rowColor">
+<tr id="i104" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWAL-org.apache.hadoop.hbase.client.RegionInfo-">getWAL</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</code>&nbsp;</td>
 </tr>
-<tr id="i106" class="altColor">
+<tr id="i105" class="rowColor">
 <td class="colFirst"><code>org.apache.hadoop.fs.FileSystem</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALFileSystem--">getWALFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i107" class="rowColor">
+<tr id="i106" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationStatus.html" title="class in org.apache.hadoop.hbase.replication.regionserver">ReplicationStatus</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalGroupsReplicationStatus--">getWalGroupsReplicationStatus</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i108" class="altColor">
+<tr id="i107" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/LogRoller.html" title="class in org.apache.hadoop.hbase.regionserver">LogRoller</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalRoller--">getWalRoller</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i109" class="rowColor">
+<tr id="i108" class="altColor">
 <td class="colFirst"><code>org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALRootDir--">getWALRootDir</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i110" class="altColor">
+<tr id="i109" class="rowColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALs--">getWALs</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i111" class="rowColor">
+<tr id="i110" class="altColor">
 <td class="colFirst"><code>(package private) <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.WriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.WriteLock</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWriteLock--">getWriteLock</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i112" class="altColor">
+<tr id="i111" class="rowColor">
 <td class="colFirst"><code>private long</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWriteRequestCount--">getWriteRequestCount</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i113" class="rowColor">
+<tr id="i112" class="altColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getZooKeeper--">getZooKeeper</a></span>()</code>
 <div class="block">Gets the ZooKeeper instance for this server.</div>
 </td>
 </tr>
-<tr id="i114" class="altColor">
+<tr id="i113" class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#handleReportForDutyResponse-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse-">handleReportForDutyResponse</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;c)</code>&nbsp;</td>
 </tr>
-<tr id="i115" class="rowColor">
+<tr id="i114" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeFileSystem--">initializeFileSystem</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i116" class="altColor">
+<tr id="i115" class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeMemStoreChunkCreator--">initializeMemStoreChunkCreator</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i117" class="rowColor">
+<tr id="i116" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeThreads--">initializeThreads</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i118" class="altColor">
+<tr id="i117" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeZooKeeper--">initializeZooKeeper</a></span>()</code>
 <div class="block">Bring up connection to zk ensemble and then wait until a master for this cluster and then after
  that, wait until cluster 'up' flag has been set.</div>
 </td>
 </tr>
-<tr id="i119" class="rowColor">
+<tr id="i118" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isAborted--">isAborted</a></span>()</code>
 <div class="block">Check if the server or client was aborted.</div>
 </td>
 </tr>
-<tr id="i120" class="altColor">
+<tr id="i119" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isClusterUp--">isClusterUp</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i121" class="rowColor">
+<tr id="i120" class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isHealthCheckerConfigured--">isHealthCheckerConfigured</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i122" class="altColor">
+<tr id="i121" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isHealthy--">isHealthy</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i123" class="rowColor">
+<tr id="i122" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isOnline--">isOnline</a></span>()</code>
 <div class="block">Report the status of the server.</div>
 </td>
 </tr>
-<tr id="i124" class="altColor">
+<tr id="i123" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isOnlineRegionsEmpty--">isOnlineRegionsEmpty</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i125" class="rowColor">
+<tr id="i124" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isShutDown--">isShutDown</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i126" class="altColor">
+<tr id="i125" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopped--">isStopped</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i127" class="rowColor">
+<tr id="i126" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopping--">isStopping</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i128" class="altColor">
+<tr id="i127" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#keepLooping--">keepLooping</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i129" class="rowColor">
+<tr id="i128" class="altColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#kill--">kill</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i130" class="altColor">
+<tr id="i129" class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#login-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-">login</a></span>(<a href="../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;user,
      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;host)</code>&nbsp;</td>
 </tr>
-<tr id="i131" class="rowColor">
+<tr id="i130" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#main-java.lang.String:A-">main</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</code>&nbsp;</td>
 </tr>
-<tr id="i132" class="altColor">
+<tr id="i131" class="rowColor">
 <td class="colFirst"><code>protected int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegionCleanerPeriod--">movedRegionCleanerPeriod</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i133" class="rowColor">
+<tr id="i132" class="altColor">
 <td class="colFirst"><code>private static &lt;T extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationService</a>&gt;<br>T</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#newReplicationInstance-java.lang.String-java.lang.Class-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.regionserver.HRegionServer-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.wal.WALProvider-">newReplicationInstance</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;classname,
                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;T&gt;&nbsp;xface,
@@ -1300,33 +1289,33 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
                       org.apache.hadoop.fs.Path&nbsp;oldLogDir,
                       <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider</a>&nbsp;walProvider)</code>&nbsp;</td>
 </tr>
-<tr id="i134" class="altColor">
+<tr id="i133" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onConfigurationChange-org.apache.hadoop.conf.Configuration-">onConfigurationChange</a></span>(org.apache.hadoop.conf.Configuration&nbsp;newConf)</code>
 <div class="block">This method would be called by the <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf"><code>ConfigurationManager</code></a>
  object when the <code>Configuration</code> object is reloaded from disk.</div>
 </td>
 </tr>
-<tr id="i135" class="rowColor">
+<tr id="i134" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">postOpenDeployTasks</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.PostOpenDeployContext</a>&nbsp;context)</code>
 <div class="block">Tasks to perform after region open to complete deploy of region on regionserver</div>
 </td>
 </tr>
-<tr id="i136" class="altColor">
+<tr id="i135" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#preRegistrationInitialization--">preRegistrationInitialization</a></span>()</code>
 <div class="block">All initialization needed before we go register with Master.<br>
  Do bare minimum.</div>
 </td>
 </tr>
-<tr id="i137" class="rowColor">
+<tr id="i136" class="altColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#putUpWebUI--">putUpWebUI</a></span>()</code>
 <div class="block">Puts up the webui.</div>
 </td>
 </tr>
-<tr id="i138" class="altColor">
+<tr id="i137" class="rowColor">
 <td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/client/locking/EntityLock.html" title="class in org.apache.hadoop.hbase.client.locking">EntityLock</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionLock-java.util.List-java.lang.String-org.apache.hadoop.hbase.Abortable-">regionLock</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;regionInfos,
           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;description,
@@ -1334,34 +1323,34 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
 <div class="block">Master based locks on namespaces/tables/regions.</div>
 </td>
 </tr>
-<tr id="i139" class="rowColor">
+<tr id="i138" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#registerConfigurationObservers--">registerConfigurationObservers</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i140" class="altColor">
+<tr id="i139" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#registerService-com.google.protobuf.Service-">registerService</a></span>(com.google.protobuf.Service&nbsp;instance)</code>
 <div class="block">Registers a new protocol buffer <code>Service</code> subclass as a coprocessor endpoint to be
  available for handling</div>
 </td>
 </tr>
-<tr id="i141" class="rowColor">
+<tr id="i140" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#remoteProcedureComplete-long-java.lang.Throwable-">remoteProcedureComplete</a></span>(long&nbsp;procId,
                        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;error)</code>&nbsp;</td>
 </tr>
-<tr id="i142" class="altColor">
+<tr id="i141" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeFromMovedRegions-java.lang.String-">removeFromMovedRegions</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedName)</code>&nbsp;</td>
 </tr>
-<tr id="i143" class="rowColor">
+<tr id="i142" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeRegion-org.apache.hadoop.hbase.regionserver.HRegion-org.apache.hadoop.hbase.ServerName-">removeRegion</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;r,
             <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;destination)</code>
 <div class="block">Removes the given Region from the list of onlineRegions.</div>
 </td>
 </tr>
-<tr id="i144" class="altColor">
+<tr id="i143" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportFileArchivalForQuotas-org.apache.hadoop.hbase.TableName-java.util.Collection-">reportFileArchivalForQuotas</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;archivedFiles)</code>
@@ -1369,90 +1358,90 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
  just moved to the archive directory.</div>
 </td>
 </tr>
-<tr id="i145" class="rowColor">
+<tr id="i144" class="altColor">
 <td class="colFirst"><code>private org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportForDuty--">reportForDuty</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i146" class="altColor">
+<tr id="i145" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportProcedureDone-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneRequest-">reportProcedureDone</a></span>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneRequest&nbsp;request)</code>&nbsp;</td>
 </tr>
-<tr id="i147" class="rowColor">
+<tr id="i146" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionSizesForQuotas-org.apache.hadoop.hbase.quotas.RegionSizeStore-">reportRegionSizesForQuotas</a></span>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;regionSizeStore)</code>
 <div class="block">Reports the given map of Regions and their size on the filesystem to the active Master.</div>
 </td>
 </tr>
-<tr id="i148" class="altColor">
+<tr id="i147" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionStateTransition-org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext-">reportRegionStateTransition</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.RegionStateTransitionContext</a>&nbsp;context)</code>
 <div class="block">Notify master that a handler requests to change a region state</div>
 </td>
 </tr>
-<tr id="i149" class="rowColor">
+<tr id="i148" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#run--">run</a></span>()</code>
 <div class="block">The HRegionServer sticks in this loop until closed.</div>
 </td>
 </tr>
-<tr id="i150" class="altColor">
+<tr id="i149" class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sendShutdownInterrupt--">sendShutdownInterrupt</a></span>()</code>
 <div class="block">Called on stop/abort before closing the cluster connection and meta locator.</div>
 </td>
 </tr>
-<tr id="i151" class="rowColor">
+<tr id="i150" class="altColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupClusterConnection--">setupClusterConnection</a></span>()</code>
 <div class="block">Setup our cluster connection if not already initialized.</div>
 </td>
 </tr>
-<tr id="i152" class="altColor">
+<tr id="i151" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../org/apache/hadoop/hbase/util/NettyEventLoopGroupConfig.html" title="class in org.apache.hadoop.hbase.util">NettyEventLoopGroupConfig</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupNetty-org.apache.hadoop.conf.Configuration-">setupNetty</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
-<tr id="i153" class="rowColor">
+<tr id="i152" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupWALAndReplication--">setupWALAndReplication</a></span>()</code>
 <div class="block">Setup WAL log and replication if enabled.</div>
 </td>
 </tr>
-<tr id="i154" class="altColor">
+<tr id="i153" class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupWindows-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.conf.ConfigurationManager-">setupWindows</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
             <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;cm)</code>
 <div class="block">If running on Windows, do windows-specific setup.</div>
 </td>
 </tr>
-<tr id="i155" class="rowColor">
+<tr id="i154" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#shutdownWAL-boolean-">shutdownWAL</a></span>(boolean&nbsp;close)</code>&nbsp;</td>
 </tr>
-<tr id="i156" class="altColor">
+<tr id="i155" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sleep-long-">sleep</a></span>(long&nbsp;millis)</code>&nbsp;</td>
 </tr>
-<tr id="i157" class="rowColor">
+<tr id="i156" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#startHeapMemoryManager--">startHeapMemoryManager</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i158" class="altColor">
+<tr id="i157" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#startReplicationService--">startReplicationService</a></span>()</code>
 <div class="block">Start up replication source and sink handlers.</div>
 </td>
 </tr>
-<tr id="i159" class="rowColor">
+<tr id="i158" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#startServices--">startServices</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i160" class="altColor">
+<tr id="i159" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stop-java.lang.String-">stop</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)</code>
 <div class="block">Stop this service.</div>
 </td>
 </tr>
-<tr id="i161" class="rowColor">
+<tr id="i160" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stop-java.lang.String-boolean-org.apache.hadoop.hbase.security.User-">stop</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg,
     boolean&nbsp;force,
@@ -1460,63 +1449,63 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
 <div class="block">Stops the regionserver.</div>
 </td>
 </tr>
-<tr id="i162" class="altColor">
+<tr id="i161" class="rowColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stopServiceThreads--">stopServiceThreads</a></span>()</code>
 <div class="block">Wait on all threads to finish.</div>
 </td>
 </tr>
-<tr id="i163" class="rowColor">
+<tr id="i162" class="altColor">
 <td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#toString--">toString</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i164" class="altColor">
+<tr id="i163" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#triggerFlushInPrimaryRegion-org.apache.hadoop.hbase.regionserver.HRegion-">triggerFlushInPrimaryRegion</a></span>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region)</code>
 <div class="block">Trigger a flush in the primary region replica if this region is a secondary replica.</div>
 </td>
 </tr>
-<tr id="i165" class="rowColor">
+<tr id="i164" class="altColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tryRegionServerReport-long-long-">tryRegionServerReport</a></span>(long&nbsp;reportStartTime,
                      long&nbsp;reportEndTime)</code>&nbsp;</td>
 </tr>
-<tr id="i166" class="altColor">
+<tr id="i165" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#unassign-byte:A-">unassign</a></span>(byte[]&nbsp;regionName)</code>
 <div class="block">Unassign the given region from the current regionserver and assign it randomly.</div>
 </td>
 </tr>
-<tr id="i167" class="rowColor">
+<tr id="i166" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateConfiguration--">updateConfiguration</a></span>()</code>
 <div class="block">Reload the configuration from disk.</div>
 </td>
 </tr>
-<tr id="i168" class="altColor">
+<tr id="i167" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateRegionFavoredNodesMapping-java.lang.String-java.util.List-">updateRegionFavoredNodesMapping</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName&gt;&nbsp;favoredNodes)</code>
 <div class="block">Used to update the favored nodes mapping when required.</div>
 </td>
 </tr>
-<tr id="i169" class="rowColor">
+<tr id="i168" class="altColor">
 <td class="colFirst"><code>protected void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitForMasterActive--">waitForMasterActive</a></span>()</code>
 <div class="block">Wait for an active Master.</div>
 </td>
 </tr>
-<tr id="i170" class="altColor">
+<tr id="i169" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitForServerOnline--">waitForServerOnline</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i171" class="rowColor">
+<tr id="i170" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitOnAllRegionsToClose-boolean-">waitOnAllRegionsToClose</a></span>(boolean&nbsp;abort)</code>
 <div class="block">Wait on regions close.</div>
 </td>
 </tr>
-<tr id="i172" class="altColor">
+<tr id="i171" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRollRequestFinished--">walRollRequestFinished</a></span>()</code>
 <div class="block">For testing</div>
@@ -1625,22 +1614,13 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
  ongoing client RPCs.</div>
 </li>
 </ul>
-<a name="metaTableLocator">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>metaTableLocator</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.270">metaTableLocator</a></pre>
-</li>
-</ul>
 <a name="tableDescriptors">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>tableDescriptors</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.275">tableDescriptors</a></pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.267">tableDescriptors</a></pre>
 <div class="block">Go here to get table descriptors.</div>
 </li>
 </ul>
@@ -1650,7 +1630,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
 <ul class="blockList">
 <li class="blockList">
 <h4>replicationSourceHandler</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSourceService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.278">replicationSourceHandler</a></pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSourceService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.270">replicationSourceHandler</a></pre>
 </li>
 </ul>
 <a name="replicationSinkHandler">
@@ -1659,7 +1639,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionSe
 <ul class="blockList">
 <li class="blockList">
 <h4>replicationSinkHandler</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSinkService.html" title="interface i

<TRUNCATED>

[07/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span c

<TRUNCATED>
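
The FileLockCallable and checkAndMarkRunningHbck code in the listing above implement the hbck1 exclusive lock: a hbase-hbck.lock file is created (create-if-absent) under the HBase temp directory, and a concurrent holder surfaces as an AlreadyBeingCreatedException, in which case the callable returns a null stream. Below is a minimal usage sketch, assuming an hbase-2.x classpath; the HbckLockExample class name and the placeholder work inside the try block are illustrative only, while createLockRetryCounterFactory and checkAndMarkRunningHbck are the public static methods shown in the listing.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.RetryCounter;

public class HbckLockExample {  // hypothetical class, not part of the published source
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    RetryCounter retries = HBaseFsck.createLockRetryCounterFactory(conf).create();
    // Try to create hbase-hbck.lock under the HBase temp dir. The second element of the
    // pair is null when another process already holds the lock (the
    // AlreadyBeingCreatedException branch in FileLockCallable#call() above).
    Pair<Path, FSDataOutputStream> lock = HBaseFsck.checkAndMarkRunningHbck(conf, retries);
    if (lock.getSecond() == null) {
      System.err.println("Another hbck instance appears to hold " + lock.getFirst());
      return;
    }
    try {
      // ... perform read-only checks while holding the lock (illustrative placeholder) ...
    } finally {
      // Release the lease on the lock file; the real tool also deletes the file in
      // HBaseFsck#unlockHbck().
      lock.getSecond().close();
    }
  }
}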
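
Lock acquisition is governed by the configuration keys read in createLockRetryCounterFactory and checkAndMarkRunningHbck above: hbase.hbck.lockfile.attempts, hbase.hbck.lockfile.attempt.sleep.interval, hbase.hbck.lockfile.attempt.maxsleeptime, and hbase.hbck.lockfile.maxwaittime. A sketch of overriding them follows; the class name and the chosen values are illustrative, the defaults being the constants in the listing (5 attempts, 200 ms initial sleep, 5000 ms max sleep, 80 s wait).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HbckLockTuningExample {  // hypothetical class, values are illustrative
  public static Configuration tunedConf() {
    Configuration conf = HBaseConfiguration.create();
    // Keys read by createLockRetryCounterFactory(conf) in the listing above
    // (defaults: 5 attempts, 200 ms initial sleep, 5000 ms max sleep).
    conf.setInt("hbase.hbck.lockfile.attempts", 10);
    conf.setInt("hbase.hbck.lockfile.attempt.sleep.interval", 500);
    conf.setInt("hbase.hbck.lockfile.attempt.maxsleeptime", 10000);
    // Key read by checkAndMarkRunningHbck(): how long, in seconds, to wait for the
    // FileLockCallable before giving up (default 80, chosen to exceed the HDFS lease
    // soft limit of 60 seconds noted in the comments above).
    conf.setInt("hbase.hbck.lockfile.maxwaittime", 120);
    return conf;
  }
}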

[29/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html
index 98a70a6..8f8bcd8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterStatusServlet.html
@@ -30,82 +30,78 @@
 <span class="sourceLineNo">022</span>import java.util.List;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.Map;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.Set;<a name="line.24"></a>
-<span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import javax.servlet.http.HttpServlet;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import javax.servlet.http.HttpServletRequest;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import javax.servlet.http.HttpServletResponse;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.conf.Configuration;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.ServerName;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.tmpl.master.MasterStatusTmpl;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.35"></a>
-<span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>/**<a name="line.37"></a>
-<span class="sourceLineNo">038</span> * The servlet responsible for rendering the index page of the<a name="line.38"></a>
-<span class="sourceLineNo">039</span> * master.<a name="line.39"></a>
-<span class="sourceLineNo">040</span> */<a name="line.40"></a>
-<span class="sourceLineNo">041</span>@InterfaceAudience.Private<a name="line.41"></a>
-<span class="sourceLineNo">042</span>public class MasterStatusServlet extends HttpServlet {<a name="line.42"></a>
-<span class="sourceLineNo">043</span>  private static final long serialVersionUID = 1L;<a name="line.43"></a>
-<span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>  @Override<a name="line.45"></a>
-<span class="sourceLineNo">046</span>  public void doGet(HttpServletRequest request, HttpServletResponse response)<a name="line.46"></a>
-<span class="sourceLineNo">047</span>    throws IOException<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  {<a name="line.48"></a>
-<span class="sourceLineNo">049</span>    HMaster master = (HMaster) getServletContext().getAttribute(HMaster.MASTER);<a name="line.49"></a>
-<span class="sourceLineNo">050</span>    assert master != null : "No Master in context!";<a name="line.50"></a>
+<span class="sourceLineNo">025</span>import javax.servlet.http.HttpServlet;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import javax.servlet.http.HttpServletRequest;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import javax.servlet.http.HttpServletResponse;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.ServerName;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.tmpl.master.MasterStatusTmpl;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.33"></a>
+<span class="sourceLineNo">034</span><a name="line.34"></a>
+<span class="sourceLineNo">035</span>/**<a name="line.35"></a>
+<span class="sourceLineNo">036</span> * The servlet responsible for rendering the index page of the<a name="line.36"></a>
+<span class="sourceLineNo">037</span> * master.<a name="line.37"></a>
+<span class="sourceLineNo">038</span> */<a name="line.38"></a>
+<span class="sourceLineNo">039</span>@InterfaceAudience.Private<a name="line.39"></a>
+<span class="sourceLineNo">040</span>public class MasterStatusServlet extends HttpServlet {<a name="line.40"></a>
+<span class="sourceLineNo">041</span>  private static final long serialVersionUID = 1L;<a name="line.41"></a>
+<span class="sourceLineNo">042</span><a name="line.42"></a>
+<span class="sourceLineNo">043</span>  @Override<a name="line.43"></a>
+<span class="sourceLineNo">044</span>  public void doGet(HttpServletRequest request, HttpServletResponse response)<a name="line.44"></a>
+<span class="sourceLineNo">045</span>    throws IOException<a name="line.45"></a>
+<span class="sourceLineNo">046</span>  {<a name="line.46"></a>
+<span class="sourceLineNo">047</span>    HMaster master = (HMaster) getServletContext().getAttribute(HMaster.MASTER);<a name="line.47"></a>
+<span class="sourceLineNo">048</span>    assert master != null : "No Master in context!";<a name="line.48"></a>
+<span class="sourceLineNo">049</span><a name="line.49"></a>
+<span class="sourceLineNo">050</span>    response.setContentType("text/html");<a name="line.50"></a>
 <span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>    response.setContentType("text/html");<a name="line.52"></a>
+<span class="sourceLineNo">052</span>    Configuration conf = master.getConfiguration();<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>    Configuration conf = master.getConfiguration();<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>    Map&lt;String, Integer&gt; frags = getFragmentationInfo(master, conf);<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    ServerName metaLocation = null;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>    List&lt;ServerName&gt; servers = null;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>    Set&lt;ServerName&gt; deadServers = null;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>    <a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if(master.isActiveMaster()) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      metaLocation = getMetaLocationOrNull(master);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>      ServerManager serverManager = master.getServerManager();<a name="line.63"></a>
-<span class="sourceLineNo">064</span>      if (serverManager != null) {<a name="line.64"></a>
-<span class="sourceLineNo">065</span>        deadServers = serverManager.getDeadServers().copyServerNames();<a name="line.65"></a>
-<span class="sourceLineNo">066</span>        servers = serverManager.getOnlineServersList();<a name="line.66"></a>
-<span class="sourceLineNo">067</span>      }<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    }<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>    MasterStatusTmpl tmpl = new MasterStatusTmpl()<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      .setFrags(frags)<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      .setMetaLocation(metaLocation)<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      .setServers(servers)<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      .setDeadServers(deadServers)<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      .setCatalogJanitorEnabled(master.isCatalogJanitorEnabled());<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>    if (request.getParameter("filter") != null)<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      tmpl.setFilter(request.getParameter("filter"));<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    if (request.getParameter("format") != null)<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      tmpl.setFormat(request.getParameter("format"));<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    tmpl.render(response.getWriter(), master);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  private ServerName getMetaLocationOrNull(HMaster master) {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    MetaTableLocator metaTableLocator = master.getMetaTableLocator();<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    return metaTableLocator == null ? null :<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      metaTableLocator.getMetaRegionLocation(master.getZooKeeper());<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>  private Map&lt;String, Integer&gt; getFragmentationInfo(<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      HMaster master, Configuration conf) throws IOException {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    boolean showFragmentation = conf.getBoolean(<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        "hbase.master.ui.fragmentation.enabled", false);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    if (showFragmentation) {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      return FSUtils.getTableFragmentation(master);<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    } else {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      return null;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
-<span class="sourceLineNo">100</span>}<a name="line.100"></a>
+<span class="sourceLineNo">054</span>    Map&lt;String, Integer&gt; frags = getFragmentationInfo(master, conf);<a name="line.54"></a>
+<span class="sourceLineNo">055</span>    ServerName metaLocation = null;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>    List&lt;ServerName&gt; servers = null;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>    Set&lt;ServerName&gt; deadServers = null;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    <a name="line.58"></a>
+<span class="sourceLineNo">059</span>    if(master.isActiveMaster()) {<a name="line.59"></a>
+<span class="sourceLineNo">060</span>      metaLocation = getMetaLocationOrNull(master);<a name="line.60"></a>
+<span class="sourceLineNo">061</span>      ServerManager serverManager = master.getServerManager();<a name="line.61"></a>
+<span class="sourceLineNo">062</span>      if (serverManager != null) {<a name="line.62"></a>
+<span class="sourceLineNo">063</span>        deadServers = serverManager.getDeadServers().copyServerNames();<a name="line.63"></a>
+<span class="sourceLineNo">064</span>        servers = serverManager.getOnlineServersList();<a name="line.64"></a>
+<span class="sourceLineNo">065</span>      }<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    }<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>    MasterStatusTmpl tmpl = new MasterStatusTmpl()<a name="line.68"></a>
+<span class="sourceLineNo">069</span>      .setFrags(frags)<a name="line.69"></a>
+<span class="sourceLineNo">070</span>      .setMetaLocation(metaLocation)<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      .setServers(servers)<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      .setDeadServers(deadServers)<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      .setCatalogJanitorEnabled(master.isCatalogJanitorEnabled());<a name="line.73"></a>
+<span class="sourceLineNo">074</span><a name="line.74"></a>
+<span class="sourceLineNo">075</span>    if (request.getParameter("filter") != null)<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      tmpl.setFilter(request.getParameter("filter"));<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    if (request.getParameter("format") != null)<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      tmpl.setFormat(request.getParameter("format"));<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    tmpl.render(response.getWriter(), master);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span><a name="line.81"></a>
+<span class="sourceLineNo">082</span>  private ServerName getMetaLocationOrNull(HMaster master) {<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    return MetaTableLocator.getMetaRegionLocation(master.getZooKeeper());<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  private Map&lt;String, Integer&gt; getFragmentationInfo(<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      HMaster master, Configuration conf) throws IOException {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    boolean showFragmentation = conf.getBoolean(<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        "hbase.master.ui.fragmentation.enabled", false);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    if (showFragmentation) {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      return FSUtils.getTableFragmentation(master);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    } else {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>      return null;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    }<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
+<span class="sourceLineNo">096</span>}<a name="line.96"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.Predicate.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.Predicate.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.Predicate.html
index 079a696..cd769c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.Predicate.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.Predicate.html
@@ -33,16 +33,16 @@
 <span class="sourceLineNo">025</span>import java.util.concurrent.Future;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.TimeUnit;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.TimeoutException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.master.assignment.RegionStates;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.quotas.MasterQuotaManager;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.37"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.master.assignment.RegionStates;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.quotas.MasterQuotaManager;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.37"></a>
 <span class="sourceLineNo">038</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.slf4j.Logger;<a name="line.40"></a>
@@ -233,12 +233,12 @@
 <span class="sourceLineNo">225</span>  protected static void waitMetaRegions(final MasterProcedureEnv env) throws IOException {<a name="line.225"></a>
 <span class="sourceLineNo">226</span>    int timeout = env.getMasterConfiguration().getInt("hbase.client.catalog.timeout", 10000);<a name="line.226"></a>
 <span class="sourceLineNo">227</span>    try {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      if (env.getMasterServices().getMetaTableLocator().waitMetaRegionLocation(<a name="line.228"></a>
-<span class="sourceLineNo">229</span>            env.getMasterServices().getZooKeeper(), timeout) == null) {<a name="line.229"></a>
+<span class="sourceLineNo">228</span>      if (MetaTableLocator.waitMetaRegionLocation(env.getMasterServices().getZooKeeper(),<a name="line.228"></a>
+<span class="sourceLineNo">229</span>        timeout) == null) {<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        throw new NotAllMetaRegionsOnlineException();<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      }<a name="line.231"></a>
 <span class="sourceLineNo">232</span>    } catch (InterruptedException e) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.233"></a>
+<span class="sourceLineNo">233</span>      throw (InterruptedIOException) new InterruptedIOException().initCause(e);<a name="line.233"></a>
 <span class="sourceLineNo">234</span>    }<a name="line.234"></a>
 <span class="sourceLineNo">235</span>  }<a name="line.235"></a>
 <span class="sourceLineNo">236</span><a name="line.236"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.ProcedureFuture.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.ProcedureFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.ProcedureFuture.html
index 079a696..cd769c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.ProcedureFuture.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.ProcedureFuture.html
@@ -33,16 +33,16 @@
 <span class="sourceLineNo">025</span>import java.util.concurrent.Future;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.TimeUnit;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.TimeoutException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.master.assignment.RegionStates;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.quotas.MasterQuotaManager;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.37"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.master.assignment.RegionStates;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.quotas.MasterQuotaManager;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.37"></a>
 <span class="sourceLineNo">038</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.slf4j.Logger;<a name="line.40"></a>
@@ -233,12 +233,12 @@
 <span class="sourceLineNo">225</span>  protected static void waitMetaRegions(final MasterProcedureEnv env) throws IOException {<a name="line.225"></a>
 <span class="sourceLineNo">226</span>    int timeout = env.getMasterConfiguration().getInt("hbase.client.catalog.timeout", 10000);<a name="line.226"></a>
 <span class="sourceLineNo">227</span>    try {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      if (env.getMasterServices().getMetaTableLocator().waitMetaRegionLocation(<a name="line.228"></a>
-<span class="sourceLineNo">229</span>            env.getMasterServices().getZooKeeper(), timeout) == null) {<a name="line.229"></a>
+<span class="sourceLineNo">228</span>      if (MetaTableLocator.waitMetaRegionLocation(env.getMasterServices().getZooKeeper(),<a name="line.228"></a>
+<span class="sourceLineNo">229</span>        timeout) == null) {<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        throw new NotAllMetaRegionsOnlineException();<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      }<a name="line.231"></a>
 <span class="sourceLineNo">232</span>    } catch (InterruptedException e) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.233"></a>
+<span class="sourceLineNo">233</span>      throw (InterruptedIOException) new InterruptedIOException().initCause(e);<a name="line.233"></a>
 <span class="sourceLineNo">234</span>    }<a name="line.234"></a>
 <span class="sourceLineNo">235</span>  }<a name="line.235"></a>
 <span class="sourceLineNo">236</span><a name="line.236"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html
index 079a696..cd769c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.html
@@ -33,16 +33,16 @@
 <span class="sourceLineNo">025</span>import java.util.concurrent.Future;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.TimeUnit;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.TimeoutException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.conf.Configuration;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.master.assignment.RegionStates;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.quotas.MasterQuotaManager;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.37"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.master.assignment.RegionStates;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.quotas.MasterQuotaManager;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.37"></a>
 <span class="sourceLineNo">038</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.39"></a>
 <span class="sourceLineNo">040</span>import org.slf4j.Logger;<a name="line.40"></a>
@@ -233,12 +233,12 @@
 <span class="sourceLineNo">225</span>  protected static void waitMetaRegions(final MasterProcedureEnv env) throws IOException {<a name="line.225"></a>
 <span class="sourceLineNo">226</span>    int timeout = env.getMasterConfiguration().getInt("hbase.client.catalog.timeout", 10000);<a name="line.226"></a>
 <span class="sourceLineNo">227</span>    try {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      if (env.getMasterServices().getMetaTableLocator().waitMetaRegionLocation(<a name="line.228"></a>
-<span class="sourceLineNo">229</span>            env.getMasterServices().getZooKeeper(), timeout) == null) {<a name="line.229"></a>
+<span class="sourceLineNo">228</span>      if (MetaTableLocator.waitMetaRegionLocation(env.getMasterServices().getZooKeeper(),<a name="line.228"></a>
+<span class="sourceLineNo">229</span>        timeout) == null) {<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        throw new NotAllMetaRegionsOnlineException();<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      }<a name="line.231"></a>
 <span class="sourceLineNo">232</span>    } catch (InterruptedException e) {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      throw (InterruptedIOException)new InterruptedIOException().initCause(e);<a name="line.233"></a>
+<span class="sourceLineNo">233</span>      throw (InterruptedIOException) new InterruptedIOException().initCause(e);<a name="line.233"></a>
 <span class="sourceLineNo">234</span>    }<a name="line.234"></a>
 <span class="sourceLineNo">235</span>  }<a name="line.235"></a>
 <span class="sourceLineNo">236</span><a name="line.236"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.html
index 9a3dd7c..ed60cc5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.html
@@ -29,27 +29,27 @@
 <span class="sourceLineNo">021</span>import java.util.List;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import java.util.Map;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.Set;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.fs.FileSystem;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.fs.Path;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.TableName;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.slf4j.Logger;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.slf4j.LoggerFactory;<a name="line.44"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.fs.FileSystem;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.fs.Path;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.TableName;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.mob.MobUtils;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.slf4j.Logger;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.slf4j.LoggerFactory;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
 <span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.45"></a>
 <span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.47"></a>
@@ -167,7 +167,7 @@
 <span class="sourceLineNo">159</span>  private void verifyRegions(final SnapshotManifest manifest) throws IOException {<a name="line.159"></a>
 <span class="sourceLineNo">160</span>    List&lt;RegionInfo&gt; regions;<a name="line.160"></a>
 <span class="sourceLineNo">161</span>    if (TableName.META_TABLE_NAME.equals(tableName)) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      regions = new MetaTableLocator().getMetaRegions(services.getZooKeeper());<a name="line.162"></a>
+<span class="sourceLineNo">162</span>      regions = MetaTableLocator.getMetaRegions(services.getZooKeeper());<a name="line.162"></a>
 <span class="sourceLineNo">163</span>    } else {<a name="line.163"></a>
 <span class="sourceLineNo">164</span>      regions = MetaTableAccessor.getTableRegions(services.getConnection(), tableName);<a name="line.164"></a>
 <span class="sourceLineNo">165</span>    }<a name="line.165"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
index 02ec363..9f9e270 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.html
@@ -32,42 +32,42 @@
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.Set;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.CancellationException;<a name="line.26"></a>
-<span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FileSystem;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FileUtil;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.ServerName;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.TableName;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.errorhandling.ForeignException;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.executor.EventHandler;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.executor.EventType;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.master.MetricsSnapshot;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.master.SnapshotSentinel;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.master.locking.LockManager;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.master.locking.LockManager.MasterLock;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.procedure2.LockType;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.zookeeper.KeeperException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.slf4j.Logger;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.slf4j.LoggerFactory;<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.62"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.fs.FileSystem;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FileUtil;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.Path;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.ServerName;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.TableName;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.errorhandling.ForeignException;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.executor.EventHandler;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.executor.EventType;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.master.MetricsSnapshot;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.master.SnapshotSentinel;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.master.locking.LockManager;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.master.locking.LockManager.MasterLock;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.procedure2.LockType;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.zookeeper.KeeperException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.slf4j.Logger;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.slf4j.LoggerFactory;<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
 <span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.63"></a>
 <span class="sourceLineNo">064</span><a name="line.64"></a>
 <span class="sourceLineNo">065</span>/**<a name="line.65"></a>
@@ -201,7 +201,7 @@
 <span class="sourceLineNo">193</span><a name="line.193"></a>
 <span class="sourceLineNo">194</span>      List&lt;Pair&lt;RegionInfo, ServerName&gt;&gt; regionsAndLocations;<a name="line.194"></a>
 <span class="sourceLineNo">195</span>      if (TableName.META_TABLE_NAME.equals(snapshotTable)) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>        regionsAndLocations = new MetaTableLocator().getMetaRegionsAndLocations(<a name="line.196"></a>
+<span class="sourceLineNo">196</span>        regionsAndLocations = MetaTableLocator.getMetaRegionsAndLocations(<a name="line.196"></a>
 <span class="sourceLineNo">197</span>          server.getZooKeeper());<a name="line.197"></a>
 <span class="sourceLineNo">198</span>      } else {<a name="line.198"></a>
 <span class="sourceLineNo">199</span>        regionsAndLocations = MetaTableAccessor.getTableRegionsAndLocations(<a name="line.199"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.html
index 74563f6..dc3d4e8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.html
@@ -32,33 +32,33 @@
 <span class="sourceLineNo">024</span>import java.util.Map;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.Set;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.ThreadPoolExecutor;<a name="line.26"></a>
-<span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseInterfaceAudience;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.ServerName;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.TableName;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.errorhandling.ForeignException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.master.MasterCoprocessorHost;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.master.MetricsMaster;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.procedure.MasterProcedureManager;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.procedure.Procedure;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.security.User;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.zookeeper.KeeperException;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.slf4j.Logger;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.slf4j.LoggerFactory;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.53"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseInterfaceAudience;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.MetaTableAccessor;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.ServerName;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableName;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.errorhandling.ForeignException;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.master.MasterCoprocessorHost;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.master.MetricsMaster;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.procedure.MasterProcedureManager;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.procedure.Procedure;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.procedure.ProcedureCoordinatorRpcs;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.security.User;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.zookeeper.KeeperException;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.52"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>
 <span class="sourceLineNo">056</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.56"></a>
@@ -139,7 +139,7 @@
 <span class="sourceLineNo">131</span>    List&lt;Pair&lt;RegionInfo, ServerName&gt;&gt; regionsAndLocations;<a name="line.131"></a>
 <span class="sourceLineNo">132</span><a name="line.132"></a>
 <span class="sourceLineNo">133</span>    if (TableName.META_TABLE_NAME.equals(tableName)) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      regionsAndLocations = new MetaTableLocator().getMetaRegionsAndLocations(<a name="line.134"></a>
+<span class="sourceLineNo">134</span>      regionsAndLocations = MetaTableLocator.getMetaRegionsAndLocations(<a name="line.134"></a>
 <span class="sourceLineNo">135</span>        master.getZooKeeper());<a name="line.135"></a>
 <span class="sourceLineNo">136</span>    } else {<a name="line.136"></a>
 <span class="sourceLineNo">137</span>      regionsAndLocations = MetaTableAccessor.getTableRegionsAndLocations(<a name="line.137"></a>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
index 2cdee19..e6bc675 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
@@ -110,2406 +110,2407 @@
 <span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.util.ForeignExceptionUtil;<a name="line.102"></a>
 <span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.103"></a>
 <span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.zookeeper.KeeperException;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.slf4j.Logger;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.slf4j.LoggerFactory;<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;<a name="line.181"></a>
-<span class="sourceLineNo">182</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;<a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;<a name="line.201"></a>
-<span class="sourceLineNo">202</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersRequest;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersResponse;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;<a name="line.211"></a>
-<span class="sourceLineNo">212</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;<a name="line.213"></a>
-<span class="sourceLineNo">214</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;<a name="line.216"></a>
-<span class="sourceLineNo">217</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;<a name="line.217"></a>
-<span class="sourceLineNo">218</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;<a name="line.224"></a>
-<span class="sourceLineNo">225</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerRequest;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerResponse;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;<a name="line.239"></a>
-<span class="sourceLineNo">240</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;<a name="line.240"></a>
-<span class="sourceLineNo">241</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;<a name="line.243"></a>
-<span class="sourceLineNo">244</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse;<a name="line.244"></a>
-<span class="sourceLineNo">245</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetTableStateInMetaRequest;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;<a name="line.261"></a>
-<span class="sourceLineNo">262</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;<a name="line.262"></a>
-<span class="sourceLineNo">263</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationRequest;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationResponse;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RemoteProcedureResult;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneRequest;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneResponse;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;<a name="line.288"></a>
-<span class="sourceLineNo">289</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest;<a name="line.292"></a>
-<span class="sourceLineNo">293</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationState;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateRequest;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateResponse;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>/**<a name="line.309"></a>
-<span class="sourceLineNo">310</span> * Implements the master RPC services.<a name="line.310"></a>
-<span class="sourceLineNo">311</span> */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>@InterfaceAudience.Private<a name="line.312"></a>
-<span class="sourceLineNo">313</span>@SuppressWarnings("deprecation")<a name="line.313"></a>
-<span class="sourceLineNo">314</span>public class MasterRpcServices extends RSRpcServices<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      implements MasterService.BlockingInterface, RegionServerStatusService.BlockingInterface,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        LockService.BlockingInterface, HbckService.BlockingInterface {<a name="line.316"></a>
-<span class="sourceLineNo">317</span>  private static final Logger LOG = LoggerFactory.getLogger(MasterRpcServices.class.getName());<a name="line.317"></a>
-<span class="sourceLineNo">318</span><a name="line.318"></a>
-<span class="sourceLineNo">319</span>  private final HMaster master;<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @return Subset of configuration to pass initializing regionservers: e.g.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * the filesystem to use and root directory to use.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   */<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  private RegionServerStartupResponse.Builder createConfigurationSubset() {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    RegionServerStartupResponse.Builder resp = addConfig(<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      RegionServerStartupResponse.newBuilder(), HConstants.HBASE_DIR);<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    resp = addConfig(resp, "fs.defaultFS");<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    return addConfig(resp, "hbase.master.info.port");<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private RegionServerStartupResponse.Builder addConfig(<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      final RegionServerStartupResponse.Builder resp, final String key) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    NameStringPair.Builder entry = NameStringPair.newBuilder()<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      .setName(key)<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      .setValue(master.getConfiguration().get(key));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    resp.addMapEntries(entry.build());<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    return resp;<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  public MasterRpcServices(HMaster m) throws IOException {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    super(m);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    master = m;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  @Override<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  protected Class&lt;?&gt; getRpcSchedulerFactoryClass() {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    Configuration conf = getConfiguration();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    if (conf != null) {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      return conf.getClass(MASTER_RPC_SCHEDULER_FACTORY_CLASS, super.getRpcSchedulerFactoryClass());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    } else {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return super.getRpcSchedulerFactoryClass();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  @Override<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  protected RpcServerInterface createRpcServer(Server server, Configuration conf,<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      RpcSchedulerFactory rpcSchedulerFactory, InetSocketAddress bindAddress, String name)<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      throws IOException {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    // RpcServer at HM by default enable ByteBufferPool iff HM having user table region in it<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    boolean reservoirEnabled = conf.getBoolean(RESERVOIR_ENABLED_KEY,<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        LoadBalancer.isMasterCanHostUserRegions(conf));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    try {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return RpcServerFactory.createRpcServer(server, name, getServices(),<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          bindAddress, // use final bindAddress for this server.<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          conf, rpcSchedulerFactory.create(conf, this, server), reservoirEnabled);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    } catch (BindException be) {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      throw new IOException(be.getMessage() + ". To switch ports use the '"<a name="line.368"></a>
-<span class="sourceLineNo">369</span>          + HConstants.MASTER_PORT + "' configuration property.",<a name="line.369"></a>
-<span class="sourceLineNo">370</span>          be.getCause() != null ? be.getCause() : be);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    }<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  @Override<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  protected PriorityFunction createPriority() {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    return new MasterAnnotationReadingPriorityFunction(this);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  }<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /**<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Checks for the following pre-checks in order:<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   * &lt;ol&gt;<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   *   &lt;li&gt;Master is initialized&lt;/li&gt;<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   *   &lt;li&gt;Rpc caller has admin permissions&lt;/li&gt;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * &lt;/ol&gt;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @param requestName name of rpc request. Used in reporting failures to provide context.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @throws ServiceException If any of the above listed pre-check fails.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  private void rpcPreCheck(String requestName) throws ServiceException {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    try {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      master.checkInitialized();<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      requirePermission(requestName, Permission.Action.ADMIN);<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    } catch (IOException ioe) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      throw new ServiceException(ioe);<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
-<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>  enum BalanceSwitchMode {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    SYNC,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    ASYNC<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>  /**<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * Assigns balancer switch according to BalanceSwitchMode<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * @param b new balancer switch<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param mode BalanceSwitchMode<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @return old balancer switch<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  boolean switchBalancer(final boolean b, BalanceSwitchMode mode) throws IOException {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    boolean oldValue = master.loadBalancerTracker.isBalancerOn();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    boolean newValue = b;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    try {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      if (master.cpHost != null) {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>        master.cpHost.preBalanceSwitch(newValue);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      }<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      try {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>        if (mode == BalanceSwitchMode.SYNC) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>          synchronized (master.getLoadBalancer()) {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>            master.loadBalancerTracker.setBalancerOn(newValue);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>          }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>        } else {<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          master.loadBalancerTracker.setBalancerOn(newValue);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>      } catch (KeeperException ke) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        throw new IOException(ke);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
-<span class="sourceLineNo">426</span>      LOG.info(master.getClientIdAuditPrefix() + " set balanceSwitch=" + newValue);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      if (master.cpHost != null) {<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        master.cpHost.postBalanceSwitch(oldValue, newValue);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      }<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      master.getLoadBalancer().updateBalancerStatus(newValue);<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } catch (IOException ioe) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      LOG.warn("Error flipping balance switch", ioe);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    return oldValue;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>  boolean synchronousBalanceSwitch(final boolean b) throws IOException {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    return switchBalancer(b, BalanceSwitchMode.SYNC);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  }<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /**<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @return list of blocking services and their security info classes that this server supports<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  @Override<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  protected List&lt;BlockingServiceAndInterface&gt; getServices() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    List&lt;BlockingServiceAndInterface&gt; bssi = new ArrayList&lt;&gt;(5);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    bssi.add(new BlockingServiceAndInterface(<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      MasterService.newReflectiveBlockingService(this),<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      MasterService.BlockingInterface.class));<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    bssi.add(new BlockingServiceAndInterface(<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      RegionServerStatusService.newReflectiveBlockingService(this),<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      RegionServerStatusService.BlockingInterface.class));<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    bssi.add(new BlockingServiceAndInterface(LockService.newReflectiveBlockingService(this),<a name="line.453"></a>
-<span class="sourceLineNo">454</span>        LockService.BlockingInterface.class));<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    bssi.add(new BlockingServiceAndInterface(HbckService.newReflectiveBlockingService(this),<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        HbckService.BlockingInterface.class));<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    bssi.addAll(super.getServices());<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    return bssi;<a name="line.458"></a>
-<span class="sourceLineNo">459</span>  }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>  @Override<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  @QosPriority(priority = HConstants.ADMIN_QOS)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  public GetLastFlushedSequenceIdResponse getLastFlushedSequenceId(RpcController controller,<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      GetLastFlushedSequenceIdRequest request) throws ServiceException {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    try {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      master.checkServiceStarted();<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    } catch (IOException ioe) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throw new ServiceException(ioe);<a name="line.468"></a>
-<span class="sourceLineNo">469</span>    }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    byte[] encodedRegionName = request.getRegionName().toByteArray();<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    RegionStoreSequenceIds ids = master.getServerManager()<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      .getLastFlushedSequenceId(encodedRegionName);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    return ResponseConverter.buildGetLastFlushedSequenceIdResponse(ids);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  }<a name="line.474"></a>
-<span class="sourceLineNo">475</span><a name="line.475"></a>
-<span class="sourceLineNo">476</span>  @Override<a name="line.476"></a>
-<span class="sourceLineNo">477</span>  public RegionServerReportResponse regionServerReport(RpcController controller,<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      RegionServerReportRequest request) throws ServiceException {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    try {<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      master.checkServiceStarted();<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      int versionNumber = 0;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      String version = "0.0.0";<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      VersionInfo versionInfo = VersionInfoUtil.getCurrentClientVersionInfo();<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      if (versionInfo != null) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        version = versionInfo.getVersion();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        versionNumber = VersionInfoUtil.getVersionNumber(versionInfo);<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      }<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      ClusterStatusProtos.ServerLoad sl = request.getLoad();<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      ServerName serverName = ProtobufUtil.toServerName(request.getServer());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>      ServerMetrics oldLoad = master.getServerManager().getLoad(serverName);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      ServerMetrics newLoad =<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        ServerMetricsBuilder.toServerMetrics(serverName, versionNumber, version, sl);<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      master.getServerManager().regionServerReport(serverName, newLoad);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      master.getAssignmentManager().reportOnlineRegions(serverName,<a name="line.494"></a>
-<span class="sourceLineNo">495</span>        newLoad.getRegionMetrics().keySet());<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      if (sl != null &amp;&amp; master.metricsMaster != null) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        // Up our metrics.<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        master.metricsMaster.incrementRequests(<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          sl.getTotalNumberOfRequests() - (oldLoad != null ? oldLoad.getRequestCount() : 0));<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      }<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    } catch (IOException ioe) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      throw new ServiceException(ioe);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    return RegionServerReportResponse.newBuilder().build();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>  }<a name="line.505"></a>
-<span class="sourceLineNo">506</span><a name="line.506"></a>
-<span class="sourceLineNo">507</span>  @Override<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  public RegionServerStartupResponse regionServerStartup(RpcController controller,<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      RegionServerStartupRequest request) throws ServiceException {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    // Register with server manager<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    try {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      master.checkServiceStarted();<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      int versionNumber = 0;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      String version = "0.0.0";<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      VersionInfo versionInfo = VersionInfoUtil.getCurrentClientVersionInfo();<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      if (versionInfo != null) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        version = versionInfo.getVersion();<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        versionNumber = VersionInfoUtil.getVersionNumber(versionInfo);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      InetAddress ia = master.getRemoteInetAddress(request.getPort(), request.getServerStartCode());<a name="line.520"></a>
-<span class="sourceLineNo">521</span>      // if regionserver passed hostname to use,<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      // then use it instead of doing a reverse DNS lookup<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      ServerName rs =<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        master.getServerManager().regionServerStartup(request, versionNumber, version, ia);<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>      // Send back some config info<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      RegionServerStartupResponse.Builder resp = createConfigurationSubset();<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      NameStringPair.Builder entry = NameStringPair.newBuilder()<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        .setName(HConstants.KEY_FOR_HOSTNAME_SEEN_BY_MASTER).setValue(rs.getHostname());<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      resp.addMapEntries(entry.build());<a name="line.530"></a>
-<span class="sourceLineNo">531</span><a name="line.531"></a>
-<span class="sourceLineNo">532</span>      return resp.build();<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    } catch (IOException ioe) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throw new ServiceException(ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  @Override<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  public ReportRSFatalErrorResponse reportRSFatalError(<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      RpcController controller, ReportRSFatalErrorRequest request) throws ServiceException {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    String errorText = request.getErrorMessage();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    ServerName sn = ProtobufUtil.toServerName(request.getServer());<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    String msg = sn + " reported a fatal error:\n" + errorText;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    LOG.warn(msg);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    master.rsFatals.add(msg);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return ReportRSFatalErrorResponse.newBuilder().build();<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>  public AddColumnResponse addColumn(RpcController controller,<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      AddColumnRequest req) throws ServiceException {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      long procId = master.addColumn(<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          ProtobufUtil.toTableName(req.getTableName()),<a name="line.554"></a>
-<span class="sourceLineNo">555</span>          ProtobufUtil.toColumnFamilyDescriptor(req.getColumnFamilies()),<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          req.getNonceGroup(),<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          req.getNonce());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      if (procId == -1) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.559"></a>
-<span class="sourceLineNo">560</span>        return AddColumnResponse.newBuilder().build();<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      } else {<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        return AddColumnResponse.newBuilder().setProcId(procId).build();<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      }<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    } catch (IOException ioe) {<a name="line.564"></a>
-<span class="sourceLineNo">565</span>      throw new ServiceException(ioe);<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    }<a name="line.566"></a>
-<span class="sourceLineNo">567</span>  }<a name="line.567"></a>
-<span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>  @Override<a name="line.569"></a>
-<span class="sourceLineNo">570</span>  public AssignRegionResponse assignRegion(RpcController controller,<a name="line.570"></a>
-<span class="sourceLineNo">571</span>      AssignRegionRequest req) throws ServiceException {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    try {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      master.checkInitialized();<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>      final RegionSpecifierType type = req.getRegion().getType();<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      if (type != RegionSpecifierType.REGION_NAME) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>        LOG.warn("assignRegion specifier type: expected: " + RegionSpecifierType.REGION_NAME<a name="line.577"></a>
-<span class="sourceLineNo">578</span>          + " actual: " + type);<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>      final byte[] regionName = req.getRegion().getValue().toByteArray();<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      final RegionInfo regionInfo = master.getAssignmentManager().getRegionInfo(regionName);<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      if (regionInfo == null) throw new UnknownRegionException(Bytes.toStringBinary(regionName));<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>      final AssignRegionResponse arr = AssignRegionResponse.newBuilder().build();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      if (master.cpHost != null) {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>        master.cpHost.preAssign(regionInfo);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      }<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      LOG.info(master.getClientIdAuditPrefix() + " assign " + regionInfo.getRegionNameAsString());<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      master.getAssignmentManager().assign(regionInfo);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      if (master.cpHost != null) {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>        master.cpHost.postAssign(regionInfo);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      }<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      return arr;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    } catch (IOException ioe) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      throw new ServiceException(ioe);<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
-<span class="sourceLineNo">598</span>  }<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.zookeeper.KeeperException;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.slf4j.Logger;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.slf4j.LoggerFactory;<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.113"></a>
+<span class="sourceLineNo">114</span><a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;<a name="line.180"></a>
+<span class="sourceLineNo">181</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;<a name="line.190"></a>
+<span class="sourceLineNo">191</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;<a name="line.191"></a>
+<span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;<a name="line.201"></a>
+<span class="sourceLineNo">202</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;<a name="line.202"></a>
+<span class="sourceLineNo">203</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;<a name="line.203"></a>
+<span class="sourceLineNo">204</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;<a name="line.204"></a>
+<span class="sourceLineNo">205</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;<a name="line.205"></a>
+<span class="sourceLineNo">206</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersRequest;<a name="line.206"></a>
+<span class="sourceLineNo">207</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersResponse;<a name="line.207"></a>
+<span class="sourceLineNo">208</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;<a name="line.208"></a>
+<span class="sourceLineNo">209</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;<a name="line.209"></a>
+<span class="sourceLineNo">210</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;<a name="line.211"></a>
+<span class="sourceLineNo">212</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse;<a name="line.213"></a>
+<span class="sourceLineNo">214</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;<a name="line.214"></a>
+<span class="sourceLineNo">215</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;<a name="line.215"></a>
+<span class="sourceLineNo">216</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;<a name="line.223"></a>
+<span class="sourceLineNo">224</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;<a name="line.224"></a>
+<span class="sourceLineNo">225</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;<a name="line.228"></a>
+<span class="sourceLineNo">229</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;<a name="line.230"></a>
+<span class="sourceLineNo">231</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse;<a name="line.231"></a>
+<span class="sourceLineNo">232</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerRequest;<a name="line.232"></a>
+<span class="sourceLineNo">233</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerResponse;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;<a name="line.238"></a>
+<span class="sourceLineNo">239</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;<a name="line.239"></a>
+<span class="sourceLineNo">240</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;<a name="line.240"></a>
+<span class="sourceLineNo">241</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;<a name="line.241"></a>
+<span class="sourceLineNo">242</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse;<a name="line.243"></a>
+<span class="sourceLineNo">244</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;<a name="line.244"></a>
+<span class="sourceLineNo">245</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetTableStateInMetaRequest;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;<a name="line.257"></a>
+<span class="sourceLineNo">258</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;<a name="line.259"></a>
+<span class="sourceLineNo">260</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;<a name="line.264"></a>
+<span class="sourceLineNo">265</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest;<a name="line.265"></a>
+<span class="sourceLineNo">266</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationRequest;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationResponse;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;<a name="line.279"></a>
+<span class="sourceLineNo">280</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RemoteProcedureResult;<a name="line.284"></a>
+

<TRUNCATED>

[25/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index ea05301..26a93dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -269,3590 +269,3574 @@
 <span class="sourceLineNo">261</span>   */<a name="line.261"></a>
 <span class="sourceLineNo">262</span>  protected ClusterConnection clusterConnection;<a name="line.262"></a>
 <span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /*<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Long-living meta table locator, which is created when the server is started and stopped<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * when server shuts down. References to this locator shall be used to perform according<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * operations in EventHandlers. Primary reason for this decision is to make it mockable<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * for tests.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  protected MetaTableLocator metaTableLocator;<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * Go here to get table descriptors.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  protected TableDescriptors tableDescriptors;<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>  // Replication services. If no replication, this handler will be null.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  // Compactions<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public CompactSplit compactSplitThread;<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Map of regions currently being served by this region server. Key is the<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * encoded region name.  All access should be synchronized.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /**<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * and here we really mean DataNode locations.<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  // Leases<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  protected Leases leases;<a name="line.303"></a>
+<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * Go here to get table descriptors.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  protected TableDescriptors tableDescriptors;<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>  // Replication services. If no replication, this handler will be null.<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  // Compactions<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public CompactSplit compactSplitThread;<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * Map of regions currently being served by this region server. Key is the<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * encoded region name.  All access should be synchronized.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.280"></a>
+<span class="sourceLineNo">281</span><a name="line.281"></a>
+<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
+<span class="sourceLineNo">283</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * and here we really mean DataNode locations.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   */<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  // Leases<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  protected Leases leases;<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>  // Instance of the hbase executor executorService.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  protected ExecutorService executorService;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  // If false, the file system has become unavailable<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  protected volatile boolean fsOk;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  protected HFileSystem fs;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected HFileSystem walFs;<a name="line.303"></a>
 <span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>  // Instance of the hbase executor executorService.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  protected ExecutorService executorService;<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  // If false, the file system has become unavailable<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  protected volatile boolean fsOk;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  protected HFileSystem fs;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  protected HFileSystem walFs;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  // Set when a report to the master comes back with a message asking us to<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  // of HRegionServer in isolation.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private volatile boolean stopped = false;<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // debugging and unit tests.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private volatile boolean abortRequested;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  // Default abort timeout is 1200 seconds for safe<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  // Will run this task when abort timeout<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.325"></a>
+<span class="sourceLineNo">305</span>  // Set when a report to the master comes back with a message asking us to<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  // of HRegionServer in isolation.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private volatile boolean stopped = false;<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  // debugging and unit tests.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private volatile boolean abortRequested;<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  // Default abort timeout is 1200 seconds for safe<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  // Will run this task when abort timeout<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  // space regions.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  private boolean stopping = false;<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  volatile boolean killed = false;<a name="line.325"></a>
 <span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.327"></a>
+<span class="sourceLineNo">327</span>  private volatile boolean shutDown = false;<a name="line.327"></a>
 <span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  // space regions.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private boolean stopping = false;<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  volatile boolean killed = false;<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  private volatile boolean shutDown = false;<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>  protected final Configuration conf;<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Path rootDir;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Path walRootDir;<a name="line.340"></a>
+<span class="sourceLineNo">329</span>  protected final Configuration conf;<a name="line.329"></a>
+<span class="sourceLineNo">330</span><a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private Path rootDir;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private Path walRootDir;<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  final int numRetries;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  protected final int threadWakeFrequency;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  protected final int msgInterval;<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  protected final int numRegionsToReport;<a name="line.340"></a>
 <span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  final int numRetries;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  protected final int threadWakeFrequency;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected final int msgInterval;<a name="line.346"></a>
+<span class="sourceLineNo">342</span>  // Stub to do region server status calls against the master.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  RpcClient rpcClient;<a name="line.346"></a>
 <span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  protected final int numRegionsToReport;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  // Stub to do region server status calls against the master.<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  RpcClient rpcClient;<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.357"></a>
+<span class="sourceLineNo">348</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  // into web context.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  protected InfoServer infoServer;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  private JvmPauseMonitor pauseMonitor;<a name="line.357"></a>
 <span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  // into web context.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  protected InfoServer infoServer;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  private JvmPauseMonitor pauseMonitor;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** region server process name */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String REGIONSERVER = "regionserver";<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  MetricsRegionServer metricsRegionServer;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  MetricsTable metricsTable;<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private SpanReceiverHost spanReceiverHost;<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private ChoreService choreService;<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /*<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Check for compactions requests.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  ScheduledChore compactionChecker;<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /*<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * Check for flushes<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  ScheduledChore periodicFlusher;<a name="line.387"></a>
-<span class="sourceLineNo">388</span><a name="line.388"></a>
-<span class="sourceLineNo">389</span>  protected volatile WALFactory walFactory;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // WAL roller. log is protected rather than private to avoid<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // eclipse warning when accessed by inner classes<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected LogRoller walRoller;<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // A thread which calls reportProcedureDone<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // flag set after we're done setting up server threads<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // zookeeper connection and watcher<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  protected final ZKWatcher zooKeeper;<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // master address tracker<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // Cluster Status Tracker<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  // Log Splitting Worker<a name="line.410"></a>
-<span class="sourceLineNo">411</span>  private SplitLogWorker splitLogWorker;<a name="line.411"></a>
+<span class="sourceLineNo">359</span>  /** region server process name */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public static final String REGIONSERVER = "regionserver";<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  MetricsRegionServer metricsRegionServer;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  MetricsTable metricsTable;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>  private SpanReceiverHost spanReceiverHost;<a name="line.364"></a>
+<span class="sourceLineNo">365</span><a name="line.365"></a>
+<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private ChoreService choreService;<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /*<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Check for compactions requests.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  ScheduledChore compactionChecker;<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  /*<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * Check for flushes<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  ScheduledChore periodicFlusher;<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  protected volatile WALFactory walFactory;<a name="line.381"></a>
+<span class="sourceLineNo">382</span><a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // WAL roller. log is protected rather than private to avoid<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // eclipse warning when accessed by inner classes<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected LogRoller walRoller;<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  // A thread which calls reportProcedureDone<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.388"></a>
+<span class="sourceLineNo">389</span><a name="line.389"></a>
+<span class="sourceLineNo">390</span>  // flag set after we're done setting up server threads<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // zookeeper connection and watcher<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  protected final ZKWatcher zooKeeper;<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // master address tracker<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span>  // Cluster Status Tracker<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // Log Splitting Worker<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  private SplitLogWorker splitLogWorker;<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // A sleeper that sleeps for msgInterval.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  protected final Sleeper sleeper;<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private final int operationTimeout;<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  private final int shortOperationTimeout;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.411"></a>
 <span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>  // A sleeper that sleeps for msgInterval.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  protected final Sleeper sleeper;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  private final int operationTimeout;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  private final int shortOperationTimeout;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.419"></a>
+<span class="sourceLineNo">413</span>  // Cache configuration and block cache reference<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  protected CacheConfig cacheConfig;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  // Cache configuration for mob<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  final MobCacheConfig mobCacheConfig;<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** The health check chore. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  private HealthCheckChore healthCheckChore;<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  // Cache configuration and block cache reference<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  protected CacheConfig cacheConfig;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  // Cache configuration for mob<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  final MobCacheConfig mobCacheConfig;<a name="line.424"></a>
+<span class="sourceLineNo">421</span>  /** The nonce manager chore. */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  private ScheduledChore nonceManagerChore;<a name="line.422"></a>
+<span class="sourceLineNo">423</span><a name="line.423"></a>
+<span class="sourceLineNo">424</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.424"></a>
 <span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>  /** The health check chore. */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  private HealthCheckChore healthCheckChore;<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>  /** The nonce manager chore. */<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  private ScheduledChore nonceManagerChore;<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * against  Master.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected ServerName serverName;<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /*<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * hostname specified by hostname config<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  protected String useThisHostnameInstead;<a name="line.444"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * against  Master.<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   */<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  protected ServerName serverName;<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /*<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * hostname specified by hostname config<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  protected String useThisHostnameInstead;<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  // key to the config parameter of server hostname<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  // both master and region server<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.441"></a>
+<span class="sourceLineNo">442</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.444"></a>
 <span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  // key to the config parameter of server hostname<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  // both master and region server<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  // Exception will be thrown if both are used.<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * This servers startcode.<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  protected final long startcode;<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * Unique identifier for the cluster we are a part of.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  protected String clusterId;<a name="line.467"></a>
+<span class="sourceLineNo">446</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  // Exception will be thrown if both are used.<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * This servers startcode.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  protected final long startcode;<a name="line.454"></a>
+<span class="sourceLineNo">455</span><a name="line.455"></a>
+<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
+<span class="sourceLineNo">457</span>   * Unique identifier for the cluster we are a part of.<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   */<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  protected String clusterId;<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  /**<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * Chore to clean periodically the moved region list<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>  // chore for refreshing store files for secondary regions<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  private StorefileRefresherChore storefileRefresher;<a name="line.467"></a>
 <span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Chore to clean periodically the moved region list<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  // chore for refreshing store files for secondary regions<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  private StorefileRefresherChore storefileRefresher;<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  private RegionServerCoprocessorHost rsHost;<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>  /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * HBASE-3787) are:<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   */<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  final ServerNonceManager nonceManager;<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>  private UserProvider userProvider;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected final RSRpcServices rpcServices;<a name="line.506"></a>
+<span class="sourceLineNo">469</span>  private RegionServerCoprocessorHost rsHost;<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /**<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   * HBASE-3787) are:<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  final ServerNonceManager nonceManager;<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  private UserProvider userProvider;<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>  protected final RSRpcServices rpcServices;<a name="line.498"></a>
+<span class="sourceLineNo">499</span><a name="line.499"></a>
+<span class="sourceLineNo">500</span>  protected CoordinatedStateManager csm;<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>  /**<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  protected final ConfigurationManager configurationManager;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  protected CoordinatedStateManager csm;<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   */<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  protected final ConfigurationManager configurationManager;<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.517"></a>
+<span class="sourceLineNo">508</span>  @VisibleForTesting<a name="line.508"></a>
+<span class="sourceLineNo">509</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.509"></a>
+<span class="sourceLineNo">510</span><a name="line.510"></a>
+<span class="sourceLineNo">511</span>  private volatile ThroughputController flushThroughputController;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.513"></a>
+<span class="sourceLineNo">514</span><a name="line.514"></a>
+<span class="sourceLineNo">515</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.517"></a>
 <span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  private volatile ThroughputController flushThroughputController;<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   */<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private final boolean masterless;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  /**<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * Starts a HRegionServer at the default location<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  // Don't start any services or managers in here in the Constructor.<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    super("RegionServer");  // thread name<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TraceUtil.initTracer(conf);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    try {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      this.startcode = System.currentTimeMillis();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      this.conf = conf;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.fsOk = true;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      HFile.checkHFileVersion(this.conf);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      checkCodecs(this.conf);<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.554"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  private final boolean masterless;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>   * Starts a HRegionServer at the default location<a name="line.529"></a>
+<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  // Don't start any services or managers in here in the Constructor.<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    super("RegionServer");  // thread name<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    TraceUtil.initTracer(conf);<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    try {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      this.startcode = System.currentTimeMillis();<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      this.conf = conf;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      this.fsOk = true;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      HFile.checkHFileVersion(this.conf);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      checkCodecs(this.conf);<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.546"></a>
+<span class="sourceLineNo">547</span><a name="line.547"></a>
+<span class="sourceLineNo">548</span>      // Disable usage of meta replicas in the regionserver<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      // Config'ed params<a name="line.550"></a>
+<span class="sourceLineNo">551</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.553"></a>
+<span class="sourceLineNo">554</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.554"></a>
 <span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>      // Disable usage of meta replicas in the regionserver<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      // Config'ed params<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.564"></a>
+<span class="sourceLineNo">556</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.564"></a>
 <span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.567"></a>
+<span class="sourceLineNo">566</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.566"></a>
+<span class="sourceLineNo">567</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.567"></a>
 <span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.571"></a>
-<span class="sourceLineNo">572</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.575"></a>
-<span class="sourceLineNo">576</span><a name="line.576"></a>
-<span class="sourceLineNo">577</span>      this.abortRequested = false;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      this.stopped = false;<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      rpcServices = createRpcServices();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String hostName =<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.583"></a>
-<span class="sourceLineNo">584</span>              : this.useThisHostnameInstead;<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // login the zookeeper client principal (if using security)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      // login the server principal (if using secure Hadoop)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      login(userProvider, hostName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
-<span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
-<span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
-<span class="sourceLineNo">608</span><a name="line.608"></a>
-<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
-<span class="sourceLineNo">633</span><a name="line.633"></a>
-<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
-<span class="sourceLineNo">661</span><a name="line.661"></a>
-<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span><a name="line.733"></a>
-<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
-<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">569</span>      this.abortRequested = false;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      this.stopped = false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span><a name="line.571"></a>
+<span class="sourceLineNo">572</span>      rpcServices = createRpcServices();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      String hostName =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.575"></a>
+<span class="sourceLineNo">576</span>              : this.useThisHostnameInstead;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.579"></a>
+<span class="sourceLineNo">580</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>      // login the zookeeper client principal (if using security)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.583"></a>
+<span class="sourceLineNo">584</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>      // login the server principal (if using secure Hadoop)<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      login(userProvider, hostName);<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      // init superusers and add the server principal (if using security)<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      // or process owner as default super user.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      Superusers.initialize(conf);<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.590"></a>
+<span class="sourceLineNo">591</span><a name="line.591"></a>
+<span class="sourceLineNo">592</span>      boolean isMasterNotCarryTable =<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      // no need to instantiate global block cache when master not carry table<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      if (!isMasterNotCarryTable) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      cacheConfig = new CacheConfig(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.599"></a>
+<span class="sourceLineNo">600</span><a name="line.600"></a>
+<span class="sourceLineNo">601</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        @Override<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      };<a name="line.606"></a>
+<span class="sourceLineNo">607</span><a name="line.607"></a>
+<span class="sourceLineNo">608</span>      initializeFileSystem();<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.609"></a>
+<span class="sourceLineNo">610</span><a name="line.610"></a>
+<span class="sourceLineNo">611</span>      this.configurationManager = new ConfigurationManager();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.612"></a>
+<span class="sourceLineNo">613</span><a name="line.613"></a>
+<span class="sourceLineNo">614</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        // Open connection to zookeeper and set primary watcher<a name="line.616"></a>
+<span class="sourceLineNo">617</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        if (!this.masterless) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          masterAddressTracker.start();<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          clusterStatusTracker.start();<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        } else {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          masterAddressTracker = null;<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          clusterStatusTracker = null;<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        }<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        zooKeeper = null;<a name="line.633"></a>
+<span class="sourceLineNo">634</span>        masterAddressTracker = null;<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        clusterStatusTracker = null;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      this.rpcServices.start(zooKeeper);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      // class HRS. TODO.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      this.choreService = new ChoreService(getName(), true);<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      this.executorService = new ExecutorService(getName());<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      putUpWebUI();<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    } catch (Throwable t) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // cause of failed startup is lost.<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      LOG.error("Failed construction RegionServer", t);<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      throw t;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
+<span class="sourceLineNo">653</span><a name="line.653"></a>
+<span class="sourceLineNo">654</span>  // HMaster should override this method to load the specific config for master<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.659"></a>
+<span class="sourceLineNo">660</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.660"></a>
+<span class="sourceLineNo">661</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.661"></a>
+<span class="sourceLineNo">662</span>        throw new IOException(msg);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>      } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>        return rpcServices.isa.getHostName();<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      return hostname;<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
+<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
+<span class="sourceLineNo">670</span><a name="line.670"></a>
+<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>   * If running on Windows, do windows-specific setup.<a name="line.672"></a>
+<span class="sourceLineNo">673</span>   */<a name="line.673"></a>
+<span class="sourceLineNo">6

<TRUNCATED>
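The constructor fragment above branches on the hbase.testing.nocluster flag (source line 615): when the flag is set, no ZKWatcher, ZkCoordinatedStateManager, MasterAddressTracker or ClusterStatusTracker is created. A minimal sketch of flipping that flag from test code; only the configuration key comes from the source above, while the class name and the printed check are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public final class NoClusterFlagSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Same key the constructor checks before opening a ZooKeeper connection.
    conf.setBoolean("hbase.testing.nocluster", true);
    // With this set, the HRegionServer constructor above leaves zooKeeper,
    // masterAddressTracker and clusterStatusTracker null.
    System.out.println("hbase.testing.nocluster = "
        + conf.getBoolean("hbase.testing.nocluster", false));
  }
}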

[33/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html
index b297b54..25f088c 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html
@@ -72,107 +72,7 @@
 <div class="header">
 <h2 title="Uses of Class org.apache.hadoop.hbase.zookeeper.MetaTableLocator" class="title">Uses of Class<br>org.apache.hadoop.hbase.zookeeper.MetaTableLocator</h2>
 </div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase">org.apache.hadoop.hbase</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.regionserver">org.apache.hadoop.hbase.regionserver</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.replication.regionserver">org.apache.hadoop.hbase.replication.regionserver</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a> in <a href="../../../../../../org/apache/hadoop/hbase/package-summary.html">org.apache.hadoop.hbase</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/package-summary.html">org.apache.hadoop.hbase</a> that return <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><span class="typeNameLabel">Server.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a></span>()</code>
-<div class="block">Returns instance of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
- running inside this server.</div>
-</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList"><a name="org.apache.hadoop.hbase.regionserver">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a> in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a> declared as <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>protected <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><span class="typeNameLabel">HRegionServer.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#metaTableLocator">metaTableLocator</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a> that return <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><span class="typeNameLabel">HRegionServer.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableLocator--">getMetaTableLocator</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList"><a name="org.apache.hadoop.hbase.replication.regionserver">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a> in <a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/package-summary.html">org.apache.hadoop.hbase.replication.regionserver</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/package-summary.html">org.apache.hadoop.hbase.replication.regionserver</a> that return <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></code></td>
-<td class="colLast"><span class="typeNameLabel">ReplicationSyncUp.DummyServer.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.DummyServer.html#getMetaTableLocator--">getMetaTableLocator</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
+<div class="classUseContainer">No usage of org.apache.hadoop.hbase.zookeeper.MetaTableLocator</div>
 <!-- ======= START OF BOTTOM NAVBAR ====== -->
 <div class="bottomNav"><a name="navbar.bottom">
 <!--   -->
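The class-use page collapsing to "No usage" above matches MetaTableLocator becoming a purely static utility in this publication: callers that previously went through Server.getMetaTableLocator() or the HRegionServer.metaTableLocator field now invoke the static methods directly with a ZKWatcher. A hedged sketch under that assumption; the wrapper class, its method name and the defensive throws clause are illustrative, not part of the commit.

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

final class MetaLocationSketch {
  // Given an already-open ZKWatcher, read the current hbase:meta location.
  static ServerName locateMeta(ZKWatcher zkWatcher) throws Exception {
    // Previously an instance call: server.getMetaTableLocator().getMetaRegionLocation(zkWatcher)
    return MetaTableLocator.getMetaRegionLocation(zkWatcher);
  }
}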

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html
index 804473a..7cd57f0 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html
@@ -794,6 +794,42 @@
 </tr>
 </tbody>
 </table>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
+<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/rsgroup/package-summary.html">org.apache.hadoop.hbase.rsgroup</a> with parameters of type <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                       <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                       long&nbsp;timeout,
+                       int&nbsp;replicaId)</code>
+<div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
+ specified timeout for availability.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
+                        <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                        long&nbsp;timeout)</code>
+<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+                        <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                        long&nbsp;timeout,
+                        int&nbsp;replicaId)</code>
+<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
+</td>
+</tr>
+</tbody>
+</table>
 </li>
 <li class="blockList"><a name="org.apache.hadoop.hbase.security.access">
 <!--   -->
@@ -1072,7 +1108,7 @@
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">blockUntilAvailable</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    int&nbsp;replicaId,
                    long&nbsp;timeout)</code>
@@ -1080,14 +1116,14 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">blockUntilAvailable</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    long&nbsp;timeout)</code>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-org.apache.hadoop.conf.Configuration-">blockUntilAvailable</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    long&nbsp;timeout,
                    org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
@@ -1213,13 +1249,13 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">deleteMetaLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper)</code>
 <div class="block">Deletes the location of <code>hbase:meta</code> in ZooKeeper.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">deleteMetaLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                   int&nbsp;replicaId)</code>&nbsp;</td>
 </tr>
@@ -1335,37 +1371,37 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Gets the meta region location, if available.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                      int&nbsp;replicaId)</code>
 <div class="block">Gets the meta region location, if available.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegions</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Gets the meta regions for the given path with the default replica ID.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegions</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
               int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions for the given path and replica ID.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                           int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
@@ -1385,66 +1421,56 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                       <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                       long&nbsp;timeout,
-                       int&nbsp;replicaId)</code>
-<div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
- specified timeout for availability.</div>
-</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#getNumberOfChildren-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">getNumberOfChildren</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
 <div class="block">Get the number of children of the specified node.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#getReplicationZnodesDump-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getReplicationZnodesDump</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Returns a string with replication znodes and position of the replication log</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#getReplicationZnodesDump-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.StringBuilder-">getReplicationZnodesDump</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true" title="class or interface in java.lang">StringBuilder</a>&nbsp;sb)</code>
 <div class="block">Appends replication znodes to the passed StringBuilder.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><span class="typeNameLabel">ZKSplitLog.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKSplitLog.html#getRescanNode-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getRescanNode</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true" title="class or interface in java.util">UUID</a></code></td>
 <td class="colLast"><span class="typeNameLabel">ZKClusterId.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKClusterId.html#getUUIDForCluster-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getUUIDForCluster</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Get the UUID for the provided ZK watcher.</div>
 </td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>boolean</code></td>
+<tr class="rowColor">
+<td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#isLocationAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">isLocationAvailable</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Checks if the meta region location is available.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKSplitLog.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKSplitLog.html#isRescanNode-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">isRescanNode</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</code>
 <div class="block">Checks if the given path represents a rescan node.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#listChildrenAndWatchForNewChildren-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">listChildrenAndWatchForNewChildren</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
 <div class="block">Lists the children znodes of the specified znode.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#listChildrenAndWatchThem-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">listChildrenAndWatchThem</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
@@ -1453,7 +1479,7 @@
  the NodeCreated and NodeDeleted events.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#listChildrenBFSAndWatchThem-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">listChildrenBFSAndWatchThem</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
@@ -1461,7 +1487,7 @@
  in the same order as that of the traversal.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#listChildrenBFSNoWatch-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">listChildrenBFSNoWatch</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
@@ -1469,28 +1495,28 @@
  in the same order as that of the traversal.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#listChildrenNoWatch-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">listChildrenNoWatch</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
 <div class="block">Lists the children of the specified znode without setting any watches.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#logRetrievedMsg-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-byte:A-boolean-">logRetrievedMsg</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                byte[]&nbsp;data,
                boolean&nbsp;watcherSet)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#logZKTree-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">logZKTree</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;root)</code>
 <div class="block">Recursively print the current state of ZK (non-transactional)</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#logZKTree-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-java.lang.String-">logZKTree</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;root,
@@ -1498,7 +1524,7 @@
 <div class="block">Helper method to print the current state of the ZK tree.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#multiOrSequential-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.util.List-boolean-">multiOrSequential</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&gt;&nbsp;ops,
@@ -1506,34 +1532,34 @@
 <div class="block">Use ZooKeeper's multi-update functionality.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#nodeHasChildren-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">nodeHasChildren</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
 <div class="block">Checks if the specified znode has any children.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#processSequentially-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.util.List-">processSequentially</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&gt;&nbsp;ops)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><span class="typeNameLabel">ZKClusterId.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKClusterId.html#readClusterIdZNode-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">readClusterIdZNode</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;watcher)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKAclReset.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKAclReset.html#resetAcls-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-boolean-">resetAcls</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
          boolean&nbsp;eraseAcls)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKClusterId.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKClusterId.html#setClusterId-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ClusterId-">setClusterId</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;watcher,
             <a href="../../../../../../org/apache/hadoop/hbase/ClusterId.html" title="class in org.apache.hadoop.hbase">ClusterId</a>&nbsp;id)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#setData-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-byte:A-">setData</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
@@ -1541,7 +1567,7 @@
 <div class="block">Sets the data of the existing znode to be the specified data.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#setData-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-byte:A-int-">setData</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
@@ -1550,12 +1576,12 @@
 <div class="block">Sets the data of the existing znode to be the specified data.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#setData-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp.SetData-">setData</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
        <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp.SetData</a>&nbsp;setData)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">MasterAddressTracker.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.html#setMasterAddress-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-org.apache.hadoop.hbase.ServerName-int-">setMasterAddress</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
@@ -1566,7 +1592,7 @@
  path.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-int-org.apache.hadoop.hbase.master.RegionState.State-">setMetaLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
@@ -1575,7 +1601,7 @@
 <div class="block">Sets the location of <code>hbase:meta</code> in ZooKeeper to the specified server address.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-org.apache.hadoop.hbase.master.RegionState.State-">setMetaLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
@@ -1584,21 +1610,21 @@
  specified server address.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#setWatchIfNodeExists-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-">setWatchIfNodeExists</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode)</code>
 <div class="block">Watch the specified znode, but only if exists.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static org.apache.zookeeper.Op</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#toZooKeeperOp-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp-">toZooKeeperOp</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
              <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;op)</code>
 <div class="block">Convert from ZKUtilOp to ZKOp</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><span class="typeNameLabel">ZKUtil.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.html#updateExistingNodeData-org.apache.hadoop.hbase.zookeeper.ZKWatcher-java.lang.String-byte:A-int-">updateExistingNodeData</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
@@ -1609,31 +1635,8 @@
 </div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>boolean</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
-                        <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                        long&nbsp;timeout)</code>
-<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
-</td>
-</tr>
 <tr class="altColor">
-<td class="colFirst"><code>boolean</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                        <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                        long&nbsp;timeout,
-                        int&nbsp;replicaId)</code>
-<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
-</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>void</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">waitMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
-<div class="block">Waits indefinitely for availability of <code>hbase:meta</code>.</div>
-</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       int&nbsp;replicaId,
                       long&nbsp;timeout)</code>
@@ -1642,11 +1645,11 @@
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       long&nbsp;timeout)</code>
-<div class="block">Gets the meta region location, if available, and waits for up to the
- specified timeout if not immediately available.</div>
+<div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
+ immediately available.</div>
 </td>
 </tr>
 <tr class="altColor">

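The hunk above records MetaTableLocator#waitMetaRegionLocation changing from an instance method to a static one (and the instance-only verifyMetaRegionLocation overloads being dropped). A minimal caller-side sketch of the new static form, assuming a ZKWatcher built from a plain HBaseConfiguration; the class name, identifier string, and 30-second timeout below are illustrative assumptions, not values taken from this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public class MetaLocationExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // The identifier string and the null Abortable are illustrative assumptions.
    try (ZKWatcher zkw = new ZKWatcher(conf, "meta-location-example", null)) {
      // Static invocation, matching the updated signature shown in the hunk above;
      // waits up to the given timeout (ms, assumed value) for hbase:meta to be located.
      ServerName meta = MetaTableLocator.waitMetaRegionLocation(zkw, 30000);
      System.out.println("hbase:meta is served by " + meta);
    }
  }
}

The same lookup previously required instantiating a MetaTableLocator; after the change it is invoked directly on the class, which is consistent with the class-use pages above no longer listing MetaTableLocator as a referenced type in several packages.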
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/package-summary.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/package-summary.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/package-summary.html
index d35a912..25bd993 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/package-summary.html
@@ -155,8 +155,8 @@
 <tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a></td>
 <td class="colLast">
-<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
- which keeps hbase:meta region server location.</div>
+<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper which
+ keeps hbase:meta region server location.</div>
 </td>
 </tr>
 <tr class="altColor">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/package-use.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/package-use.html
index ff064b9..cd257bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/package-use.html
@@ -175,12 +175,6 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html#org.apache.hadoop.hbase">MetaTableLocator</a>
-<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
- which keeps hbase:meta region server location.</div>
-</td>
-</tr>
-<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html#org.apache.hadoop.hbase">ZKWatcher</a>
 <div class="block">Acts as the single ZooKeeper Watcher.</div>
 </td>
@@ -377,17 +371,11 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html#org.apache.hadoop.hbase.regionserver">MetaTableLocator</a>
-<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
- which keeps hbase:meta region server location.</div>
-</td>
-</tr>
-<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/ZKNodeTracker.html#org.apache.hadoop.hbase.regionserver">ZKNodeTracker</a>
 <div class="block">Tracks the availability and value of a single ZooKeeper node.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html#org.apache.hadoop.hbase.regionserver">ZKWatcher</a>
 <div class="block">Acts as the single ZooKeeper Watcher.</div>
 </td>
@@ -449,12 +437,6 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/MetaTableLocator.html#org.apache.hadoop.hbase.replication.regionserver">MetaTableLocator</a>
-<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
- which keeps hbase:meta region server location.</div>
-</td>
-</tr>
-<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/class-use/ZKWatcher.html#org.apache.hadoop.hbase.replication.regionserver">ZKWatcher</a>
 <div class="block">Acts as the single ZooKeeper Watcher.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/Server.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Server.html b/devapidocs/src-html/org/apache/hadoop/hbase/Server.html
index a7e73cb..868b927 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Server.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Server.html
@@ -6,121 +6,110 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
-<span class="sourceLineNo">002</span> *<a name="line.2"></a>
-<span class="sourceLineNo">003</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.3"></a>
-<span class="sourceLineNo">004</span> * or more contributor license agreements.  See the NOTICE file<a name="line.4"></a>
-<span class="sourceLineNo">005</span> * distributed with this work for additional information<a name="line.5"></a>
-<span class="sourceLineNo">006</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.6"></a>
-<span class="sourceLineNo">007</span> * to you under the Apache License, Version 2.0 (the<a name="line.7"></a>
-<span class="sourceLineNo">008</span> * "License"); you may not use this file except in compliance<a name="line.8"></a>
-<span class="sourceLineNo">009</span> * with the License.  You may obtain a copy of the License at<a name="line.9"></a>
-<span class="sourceLineNo">010</span> *<a name="line.10"></a>
-<span class="sourceLineNo">011</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.11"></a>
-<span class="sourceLineNo">012</span> *<a name="line.12"></a>
-<span class="sourceLineNo">013</span> * Unless required by applicable law or agreed to in writing, software<a name="line.13"></a>
-<span class="sourceLineNo">014</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.14"></a>
-<span class="sourceLineNo">015</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.15"></a>
-<span class="sourceLineNo">016</span> * See the License for the specific language governing permissions and<a name="line.16"></a>
-<span class="sourceLineNo">017</span> * limitations under the License.<a name="line.17"></a>
-<span class="sourceLineNo">018</span> */<a name="line.18"></a>
-<span class="sourceLineNo">019</span>package org.apache.hadoop.hbase;<a name="line.19"></a>
-<span class="sourceLineNo">020</span><a name="line.20"></a>
+<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
+<span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
+<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
+<span class="sourceLineNo">005</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.5"></a>
+<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a>
+<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a>
+<span class="sourceLineNo">008</span> * with the License.  You may obtain a copy of the License at<a name="line.8"></a>
+<span class="sourceLineNo">009</span> *<a name="line.9"></a>
+<span class="sourceLineNo">010</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a>
+<span class="sourceLineNo">011</span> *<a name="line.11"></a>
+<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a>
+<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a>
+<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a>
+<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a>
+<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a>
+<span class="sourceLineNo">017</span> */<a name="line.17"></a>
+<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase;<a name="line.18"></a>
+<span class="sourceLineNo">019</span><a name="line.19"></a>
+<span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a>
 <span class="sourceLineNo">021</span>import org.apache.hadoop.conf.Configuration;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import org.apache.hadoop.fs.FileSystem;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.client.ClusterConnection;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
-<span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.IOException;<a name="line.29"></a>
-<span class="sourceLineNo">030</span><a name="line.30"></a>
-<span class="sourceLineNo">031</span>/**<a name="line.31"></a>
-<span class="sourceLineNo">032</span> * Defines a curated set of shared functions implemented by HBase servers (Masters<a name="line.32"></a>
-<span class="sourceLineNo">033</span> * and RegionServers). For use internally only. Be judicious adding API. Changes cause ripples<a name="line.33"></a>
-<span class="sourceLineNo">034</span> * through the code base.<a name="line.34"></a>
-<span class="sourceLineNo">035</span> */<a name="line.35"></a>
-<span class="sourceLineNo">036</span>@InterfaceAudience.Private<a name="line.36"></a>
-<span class="sourceLineNo">037</span>public interface Server extends Abortable, Stoppable {<a name="line.37"></a>
-<span class="sourceLineNo">038</span>  /**<a name="line.38"></a>
-<span class="sourceLineNo">039</span>   * Gets the configuration object for this server.<a name="line.39"></a>
-<span class="sourceLineNo">040</span>   */<a name="line.40"></a>
-<span class="sourceLineNo">041</span>  Configuration getConfiguration();<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  /**<a name="line.43"></a>
-<span class="sourceLineNo">044</span>   * Gets the ZooKeeper instance for this server.<a name="line.44"></a>
-<span class="sourceLineNo">045</span>   */<a name="line.45"></a>
-<span class="sourceLineNo">046</span>  ZKWatcher getZooKeeper();<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>  /**<a name="line.48"></a>
-<span class="sourceLineNo">049</span>   * Returns a reference to the servers' connection.<a name="line.49"></a>
-<span class="sourceLineNo">050</span>   *<a name="line.50"></a>
-<span class="sourceLineNo">051</span>   * Important note: this method returns a reference to Connection which is managed<a name="line.51"></a>
-<span class="sourceLineNo">052</span>   * by Server itself, so callers must NOT attempt to close connection obtained.<a name="line.52"></a>
-<span class="sourceLineNo">053</span>   */<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  Connection getConnection();<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  Connection createConnection(Configuration conf) throws IOException;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  /**<a name="line.58"></a>
-<span class="sourceLineNo">059</span>   * Returns a reference to the servers' cluster connection. Prefer {@link #getConnection()}.<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   *<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * Important note: this method returns a reference to Connection which is managed<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * by Server itself, so callers must NOT attempt to close connection obtained.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   */<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  ClusterConnection getClusterConnection();<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  /**<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   * Returns instance of {@link org.apache.hadoop.hbase.zookeeper.MetaTableLocator}<a name="line.67"></a>
-<span class="sourceLineNo">068</span>   * running inside this server. This MetaServerLocator is started and stopped by server, clients<a name="line.68"></a>
-<span class="sourceLineNo">069</span>   * shouldn't manage it's lifecycle.<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * @return instance of {@link MetaTableLocator} associated with this server.<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   */<a name="line.71"></a>
-<span class="sourceLineNo">072</span>  MetaTableLocator getMetaTableLocator();<a name="line.72"></a>
-<span class="sourceLineNo">073</span><a name="line.73"></a>
-<span class="sourceLineNo">074</span>  /**<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * @return The unique server name for this server.<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   */<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  ServerName getServerName();<a name="line.77"></a>
-<span class="sourceLineNo">078</span><a name="line.78"></a>
-<span class="sourceLineNo">079</span>  /**<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * Get CoordinatedStateManager instance for this server.<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   */<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  CoordinatedStateManager getCoordinatedStateManager();<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * @return The {@link ChoreService} instance for this server<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  ChoreService getChoreService();<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * @return Return the FileSystem object used (can return null!).<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   */<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  // TODO: On Master, return Master's. On RegionServer, return RegionServers. The FileSystems<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  // may differ. TODO.<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  default FileSystem getFileSystem() {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // This default is pretty dodgy!<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    Configuration c = getConfiguration();<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    FileSystem fs = null;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    try {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      if (c != null) {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>        fs = FileSystem.get(c);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      }<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    } catch (IOException e) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      // If an exception, just return null<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    }<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    return fs;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  }<a name="line.106"></a>
-<span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  /**<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * @return True is the server is Stopping<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   */<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // Note: This method is not part of the Stoppable Interface.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  default boolean isStopping() {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    return false;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
-<span class="sourceLineNo">115</span>}<a name="line.115"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>/**<a name="line.28"></a>
+<span class="sourceLineNo">029</span> * Defines a curated set of shared functions implemented by HBase servers (Masters<a name="line.29"></a>
+<span class="sourceLineNo">030</span> * and RegionServers). For use internally only. Be judicious adding API. Changes cause ripples<a name="line.30"></a>
+<span class="sourceLineNo">031</span> * through the code base.<a name="line.31"></a>
+<span class="sourceLineNo">032</span> */<a name="line.32"></a>
+<span class="sourceLineNo">033</span>@InterfaceAudience.Private<a name="line.33"></a>
+<span class="sourceLineNo">034</span>public interface Server extends Abortable, Stoppable {<a name="line.34"></a>
+<span class="sourceLineNo">035</span>  /**<a name="line.35"></a>
+<span class="sourceLineNo">036</span>   * Gets the configuration object for this server.<a name="line.36"></a>
+<span class="sourceLineNo">037</span>   */<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  Configuration getConfiguration();<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>  /**<a name="line.40"></a>
+<span class="sourceLineNo">041</span>   * Gets the ZooKeeper instance for this server.<a name="line.41"></a>
+<span class="sourceLineNo">042</span>   */<a name="line.42"></a>
+<span class="sourceLineNo">043</span>  ZKWatcher getZooKeeper();<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>  /**<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * Returns a reference to the servers' connection.<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   *<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * Important note: this method returns a reference to Connection which is managed<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   * by Server itself, so callers must NOT attempt to close connection obtained.<a name="line.49"></a>
+<span class="sourceLineNo">050</span>   */<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  Connection getConnection();<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>  Connection createConnection(Configuration conf) throws IOException;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  /**<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   * Returns a reference to the servers' cluster connection. Prefer {@link #getConnection()}.<a name="line.56"></a>
+<span class="sourceLineNo">057</span>   *<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * Important note: this method returns a reference to Connection which is managed<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   * by Server itself, so callers must NOT attempt to close connection obtained.<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   */<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  ClusterConnection getClusterConnection();<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  /**<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * @return The unique server name for this server.<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  ServerName getServerName();<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>  /**<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   * Get CoordinatedStateManager instance for this server.<a name="line.69"></a>
+<span class="sourceLineNo">070</span>   */<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  CoordinatedStateManager getCoordinatedStateManager();<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  /**<a name="line.73"></a>
+<span class="sourceLineNo">074</span>   * @return The {@link ChoreService} instance for this server<a name="line.74"></a>
+<span class="sourceLineNo">075</span>   */<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  ChoreService getChoreService();<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>  /**<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * @return Return the FileSystem object used (can return null!).<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   */<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  // TODO: On Master, return Master's. On RegionServer, return RegionServers. The FileSystems<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  // may differ. TODO.<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  default FileSystem getFileSystem() {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    // This default is pretty dodgy!<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    Configuration c = getConfiguration();<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    FileSystem fs = null;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    try {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      if (c != null) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        fs = FileSystem.get(c);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      }<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    } catch (IOException e) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>      // If an exception, just return null<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    return fs;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  /**<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @return True is the server is Stopping<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  // Note: This method is not part of the Stoppable Interface.<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  default boolean isStopping() {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    return false;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
+<span class="sourceLineNo">104</span>}<a name="line.104"></a>
 
 
 

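The Server.html source above keeps the default getFileSystem() implementation, which looks a FileSystem up from the server's Configuration and swallows the IOException, returning null (the "pretty dodgy" default noted in its comment). A standalone sketch of that same lookup using only Hadoop client APIs; the class and method names below are illustrative assumptions that merely mirror the default method, not an API added by this commit:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class DefaultFileSystemLookup {
  // Mirrors Server#getFileSystem(): a null Configuration or an IOException yields null.
  static FileSystem lookup(Configuration conf) {
    if (conf == null) {
      return null;
    }
    try {
      // Resolves whatever fs.defaultFS points at (hdfs://..., file:///, ...).
      return FileSystem.get(conf);
    } catch (IOException e) {
      // Same swallow-and-return-null behaviour as the default method above.
      return null;
    }
  }

  public static void main(String[] args) {
    FileSystem fs = lookup(new Configuration());
    System.out.println(fs == null ? "no filesystem" : "working dir: " + fs.getWorkingDirectory());
  }
}

Callers of Server#getFileSystem() therefore have to tolerate a null return, as the "can return null!" warning in the Javadoc above indicates.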
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 320dc8e..f10a569 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
 <span class="sourceLineNo">011</span>  public static final String revision = "";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Mon Dec  3 14:44:16 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Wed Dec  5 14:44:22 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "6b524fd5dc892868017c9a7a944df62c";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "888b7c7a3be5b9ed5222c3804dd3faa1";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 


[16/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUti

<TRUNCATED>
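The HBaseFsck source shown in the (truncated) diff above acquires an exclusive hbck lock by creating a lock file with bounded retries (createFileWithRetries) and by bounding the overall wait with a FutureTask timeout (checkAndMarkRunningHbck, configured via "hbase.hbck.lockfile.maxwaittime"). What follows is a minimal, self-contained sketch of that retry-plus-timeout pattern using only standard Java APIs; the class and helper names (LockFileSketch, createWithRetries, tryAcquire) are hypothetical, and this is an illustration of the pattern, not the HBase implementation.

// Minimal sketch (not HBase code) of bounded-retry lock-file creation plus a
// wall-clock timeout around the whole acquisition attempt. All names are hypothetical.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class LockFileSketch {

  /** Try to create the lock file, sleeping between attempts; rethrow the last failure. */
  static Path createWithRetries(Path lockFile, int maxAttempts, long sleepMillis)
      throws IOException, InterruptedException {
    IOException last = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        // Files.createFile fails if the file already exists, giving exclusive-creation semantics.
        return Files.createFile(lockFile);
      } catch (IOException ioe) {
        last = ioe;
        System.out.println("Failed to create " + lockFile + ", try=" + attempt
            + " of " + maxAttempts);
        Thread.sleep(sleepMillis);
      }
    }
    throw last;
  }

  /** Run the creation in a worker thread and give up after a wall-clock timeout. */
  static Path tryAcquire(Path lockFile, int timeoutSeconds) {
    Callable<Path> callable = () -> createWithRetries(lockFile, 5, 200);
    FutureTask<Path> task = new FutureTask<>(callable);
    ExecutorService executor = Executors.newFixedThreadPool(1);
    executor.execute(task);
    try {
      return task.get(timeoutSeconds, TimeUnit.SECONDS);
    } catch (TimeoutException te) {
      task.cancel(true); // took too long to obtain the lock
      return null;
    } catch (Exception e) {
      return null; // creation failed after all retries, or we were interrupted
    } finally {
      executor.shutdownNow();
    }
  }

  public static void main(String[] args) {
    Path lock = Paths.get(System.getProperty("java.io.tmpdir"), "sketch-hbck.lock");
    Path acquired = tryAcquire(lock, 10);
    System.out.println(acquired == null ? "lock not acquired" : "lock acquired: " + acquired);
  }
}

Running the creation inside a FutureTask lets the caller enforce an overall wall-clock limit independently of how many create-and-sleep retries the worker thread performs, which mirrors the split between createFileWithRetries and checkAndMarkRunningHbck in the diff above.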

[47/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 8080e27..dc3be8f 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -481,7 +481,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/master/MasterServices
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT">ABORT_TIMEOUT</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT_TASK">ABORT_TIMEOUT_TASK</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheConfig">cacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheFlusher">cacheFlusher</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterConnection">clusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterId">clusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterStatusTracker">clusterStatusTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#compactSplitThread">compactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/reg
 ionserver/HRegionServer.html#conf">conf</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#configurationManager">configurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#csm">csm</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executorService">executorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fs">fs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsOk">fsOk</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsUtilizationChore">fsUtilizationChore</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#hMemManager">hMemManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#infoServer">infoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#le
 ases">leases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#lock">lock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#MASTER_HOSTNAME_KEY">MASTER_HOSTNAME_KEY</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#metaTableLocator">metaTableLocator</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegions">movedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#msgInterval">msgInterval</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#numRegionsToReport">numRegionsToReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onlineRegions">onlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionFavoredNodesMap">regionFavoredNodesMap</a>, <a href="../../../../../org/apache/hadoop/hbase
 /regionserver/HRegionServer.html#REGIONSERVER">REGIONSERVER</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionsInTransitionInRS">regionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSinkHandler">replicationSinkHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSourceHandler">replicationSourceHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcServices">rpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#secureBulkLoadManager">secureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#serverName">serverName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sleeper">sleeper</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#
 startcode">startcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tableDescriptors">tableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#TEST_SKIP_REPORTING_TRANSITION">TEST_SKIP_REPORTING_TRANSITION</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#threadWakeFrequency">threadWakeFrequency</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#useThisHostnameInstead">useThisHostnameInstead</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFactory">walFactory</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFs">walFs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRoller">walRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#zooKeeper">zooKeeper</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT">ABORT_TIMEOUT</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT_TASK">ABORT_TIMEOUT_TASK</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheConfig">cacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheFlusher">cacheFlusher</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterConnection">clusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterId">clusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterStatusTracker">clusterStatusTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#compactSplitThread">compactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/reg
 ionserver/HRegionServer.html#conf">conf</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#configurationManager">configurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#csm">csm</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executorService">executorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fs">fs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsOk">fsOk</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsUtilizationChore">fsUtilizationChore</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#hMemManager">hMemManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#infoServer">infoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#le
 ases">leases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#lock">lock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#MASTER_HOSTNAME_KEY">MASTER_HOSTNAME_KEY</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegions">movedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#msgInterval">msgInterval</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#numRegionsToReport">numRegionsToReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onlineRegions">onlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionFavoredNodesMap">regionFavoredNodesMap</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#REGIONSERVER">REGIONSERVER</a>, <a href="../../../../../org/apache/hadoop/hbase/regions
 erver/HRegionServer.html#regionsInTransitionInRS">regionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSinkHandler">replicationSinkHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSourceHandler">replicationSourceHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcServices">rpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#secureBulkLoadManager">secureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#serverName">serverName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sleeper">sleeper</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#startcode">startcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tableDescripto
 rs">tableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#TEST_SKIP_REPORTING_TRANSITION">TEST_SKIP_REPORTING_TRANSITION</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#threadWakeFrequency">threadWakeFrequency</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#useThisHostnameInstead">useThisHostnameInstead</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFactory">walFactory</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFs">walFs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRoller">walRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#zooKeeper">zooKeeper</a></code></li>
 </ul>
 </li>
 </ul>
@@ -1467,7 +1467,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/master/MasterServices
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#abort-java.lang.String-">abort</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addRegion-org.apache.hadoop.hbase.regionserver.HRegion-">addRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addToMovedRegions-java.lang.String-org.apache.hadoop.hbase.ServerName-long-">addToMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#checkFileSystem--">checkFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cleanMovedRegions--">cleanMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clearRegionBlockCache-org.apache.hadoop.hbase.regionserver.Region-">clearRegionBlockCache</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAllRegions-boolean-">closeAllRegions</a
 >, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAndOfflineRegionForSplitOrMerge-java.util.List-">closeAndOfflineRegionForSplitOrMerge</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeRegion-java.lang.String-boolean-org.apache.hadoop.hbase.ServerName-">closeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#constructRegionServer-java.lang.Class-org.apache.hadoop.conf.Configuration-">constructRegionServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#convertThrowableToIOE-java.lang.Throwable-java.lang.String-">convertThrowableToIOE</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionLoad-java.lang.String-">createRegionLoad</a>,
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub--">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub-boolean-">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#execRegionServerService-org.apache.hbase.thirdparty.com.google.protobuf.RpcController-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest-">execRegionServerService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executeProcedure-long-org.apache.hadoop.hbase.procedure2.RSProcedureCallable-">executeProcedure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCacheConfig--">getCacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getChoreService--">getChoreServi
 ce</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterId--">getClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionPressure--">getCompactionPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionRequestor--">getCompactionRequestor</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactSplitThread--">getCompactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfigurationManager--">getConfigurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConnec
 tion--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getEventLoopGroupConfig--">getEventLoopGroupConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getExecutorService--">getExecutorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFavoredNodesForRegion-java.lang.String-">getFavoredNodesForRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushPressure--">getFlushPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushRequester--">getFlushRequester</a>, <a href="../../../../../org/apache/hadoop/hbase/re
 gionserver/HRegionServer.html#getFlushThroughputController--">getFlushThroughputController</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFsTableDescriptors--">getFsTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getHeapMemoryManager--">getHeapMemoryManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getInfoServer--">getInfoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLastSequenceId-byte:A-">getLastSequenceId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLeases--">getLeases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMasterAddressTracker--">getMasterAddressTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableLocator--">getMetaTableLocator</a>, <a href="../../../../
 ../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetrics--">getMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMostLoadedRegions--">getMostLoadedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNonceManager--">getNonceManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNumberOfOnlineRegions--">getNumberOfOnlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegion-byte:A-">getOnlineRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegionsLocalContext--">getOnlineRegionsLocalContext</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineTables--">getOnlineTables</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-byte:A-">getRegion</a>, <a href="../..
 /../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-java.lang.String-">getRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionBlockLocations-java.lang.String-">getRegionBlockLocations</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-byte:A-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions--">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions-org.apache.hadoop.hbase.TableName-">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerAccounting--">getRegionServerAccounting</a>, <a href="../../../../../org/apache/
 hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessorHost--">getRegionServerCoprocessorHost</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessors--">getRegionServerCoprocessors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerMetrics--">getRegionServerMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerRpcQuotaManager--">getRegionServerRpcQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerSpaceQuotaManager--">getRegionServerSpaceQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionsInTransitionInRS--">getRegionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSinkService--">getReplicationSinkService</a>, <a href="../../../
 ../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSourceService--">getReplicationSourceService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRootDir--">getRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRpcServer--">getRpcServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRSRpcServices--">getRSRpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getSecureBulkLoadManager--">getSecureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getStartcode--">getStartcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getTableDescriptors--">getTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getThreadWakeFrequency--">getThreadWakeFrequency</a>, <a href="..
 /../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWAL-org.apache.hadoop.hbase.client.RegionInfo-">getWAL</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALFileSystem--">getWALFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalGroupsReplicationStatus--">getWalGroupsReplicationStatus</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalRoller--">getWalRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALRootDir--">getWALRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALs--">getWALs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#handleReportForDutyResponse-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse-">handleReportForDutyResponse</a>, <a href="../../
 ../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeMemStoreChunkCreator--">initializeMemStoreChunkCreator</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isAborted--">isAborted</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isClusterUp--">isClusterUp</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isOnline--">isOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isShutDown--">isShutDown</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopped--">isStopped</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopping--">isStopping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#kill--">kill</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegionCleanerPeriod--">mo
 vedRegionCleanerPeriod</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onConfigurationChange-org.apache.hadoop.conf.Configuration-">onConfigurationChange</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">postOpenDeployTasks</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionLock-java.util.List-java.lang.String-org.apache.hadoop.hbase.Abortable-">regionLock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#remoteProcedureComplete-long-java.lang.Throwable-">remoteProcedureComplete</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeRegion-org.apache.hadoop.hbase.regionserver.HRegion-org.apache.hadoop.hbase.ServerName-">removeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegi
 onServer.html#reportFileArchivalForQuotas-org.apache.hadoop.hbase.TableName-java.util.Collection-">reportFileArchivalForQuotas</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionSizesForQuotas-org.apache.hadoop.hbase.quotas.RegionSizeStore-">reportRegionSizesForQuotas</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionStateTransition-org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext-">reportRegionStateTransition</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sendShutdownInterrupt--">sendShutdownInterrupt</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupClusterConnection--">setupClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stop-java.lang.String-boolean-org.apache.hadoop.hbase.security.User-">stop</a>, <a href="../../../../
 ../org/apache/hadoop/hbase/regionserver/HRegionServer.html#toString--">toString</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tryRegionServerReport-long-long-">tryRegionServerReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#unassign-byte:A-">unassign</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateConfiguration--">updateConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateRegionFavoredNodesMapping-java.lang.String-java.util.List-">updateRegionFavoredNodesMapping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitForServerOnline--">waitForServerOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRollRequestFinished--">walRollRequestFinished</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#abort-java.lang.String-">abort</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addRegion-org.apache.hadoop.hbase.regionserver.HRegion-">addRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addToMovedRegions-java.lang.String-org.apache.hadoop.hbase.ServerName-long-">addToMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#checkFileSystem--">checkFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cleanMovedRegions--">cleanMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clearRegionBlockCache-org.apache.hadoop.hbase.regionserver.Region-">clearRegionBlockCache</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAllRegions-boolean-">closeAllRegions</a
 >, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAndOfflineRegionForSplitOrMerge-java.util.List-">closeAndOfflineRegionForSplitOrMerge</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeRegion-java.lang.String-boolean-org.apache.hadoop.hbase.ServerName-">closeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#constructRegionServer-java.lang.Class-org.apache.hadoop.conf.Configuration-">constructRegionServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#convertThrowableToIOE-java.lang.Throwable-java.lang.String-">convertThrowableToIOE</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionLoad-java.lang.String-">createRegionLoad</a>,
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub--">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub-boolean-">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#execRegionServerService-org.apache.hbase.thirdparty.com.google.protobuf.RpcController-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest-">execRegionServerService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executeProcedure-long-org.apache.hadoop.hbase.procedure2.RSProcedureCallable-">executeProcedure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCacheConfig--">getCacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getChoreService--">getChoreServi
 ce</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterId--">getClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionPressure--">getCompactionPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionRequestor--">getCompactionRequestor</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactSplitThread--">getCompactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfigurationManager--">getConfigurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConnec
 tion--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getEventLoopGroupConfig--">getEventLoopGroupConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getExecutorService--">getExecutorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFavoredNodesForRegion-java.lang.String-">getFavoredNodesForRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushPressure--">getFlushPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushRequester--">getFlushRequester</a>, <a href="../../../../../org/apache/hadoop/hbase/re
 gionserver/HRegionServer.html#getFlushThroughputController--">getFlushThroughputController</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFsTableDescriptors--">getFsTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getHeapMemoryManager--">getHeapMemoryManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getInfoServer--">getInfoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLastSequenceId-byte:A-">getLastSequenceId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLeases--">getLeases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMasterAddressTracker--">getMasterAddressTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetrics--">getMetrics</a>, <a href="../../../../../org/apache/hado
 op/hbase/regionserver/HRegionServer.html#getMostLoadedRegions--">getMostLoadedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNonceManager--">getNonceManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNumberOfOnlineRegions--">getNumberOfOnlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegion-byte:A-">getOnlineRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegionsLocalContext--">getOnlineRegionsLocalContext</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineTables--">getOnlineTables</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-byte:A-">getRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-java.lang.String-">getRegion</a>, <a href="../../../
 ../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionBlockLocations-java.lang.String-">getRegionBlockLocations</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-byte:A-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions--">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions-org.apache.hadoop.hbase.TableName-">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerAccounting--">getRegionServerAccounting</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessorHost--">getRegionServerCoprocessorHost</a>, <a href="../.
 ./../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessors--">getRegionServerCoprocessors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerMetrics--">getRegionServerMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerRpcQuotaManager--">getRegionServerRpcQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerSpaceQuotaManager--">getRegionServerSpaceQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionsInTransitionInRS--">getRegionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSinkService--">getReplicationSinkService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSourceService--">getReplicationSourceService</a>, <a
  href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRootDir--">getRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRpcServer--">getRpcServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRSRpcServices--">getRSRpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getSecureBulkLoadManager--">getSecureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getStartcode--">getStartcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getTableDescriptors--">getTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getThreadWakeFrequency--">getThreadWakeFrequency</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWAL-org.apache.hadoop.hbase.client.RegionInfo-">getWAL</
 a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALFileSystem--">getWALFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalGroupsReplicationStatus--">getWalGroupsReplicationStatus</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalRoller--">getWalRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALRootDir--">getWALRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALs--">getWALs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#handleReportForDutyResponse-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse-">handleReportForDutyResponse</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeMemStoreChunkCreator--">initializeMemStoreChunkCreato
 r</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isAborted--">isAborted</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isClusterUp--">isClusterUp</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isOnline--">isOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isShutDown--">isShutDown</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopped--">isStopped</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopping--">isStopping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#kill--">kill</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegionCleanerPeriod--">movedRegionCleanerPeriod</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onConfigurationChang
 e-org.apache.hadoop.conf.Configuration-">onConfigurationChange</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">postOpenDeployTasks</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionLock-java.util.List-java.lang.String-org.apache.hadoop.hbase.Abortable-">regionLock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#remoteProcedureComplete-long-java.lang.Throwable-">remoteProcedureComplete</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeRegion-org.apache.hadoop.hbase.regionserver.HRegion-org.apache.hadoop.hbase.ServerName-">removeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportFileArchivalForQuotas-org.apache.hadoop.hbase.TableName-java.util.Collection-">reportFileArchivalForQuotas</
 a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionSizesForQuotas-org.apache.hadoop.hbase.quotas.RegionSizeStore-">reportRegionSizesForQuotas</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionStateTransition-org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext-">reportRegionStateTransition</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sendShutdownInterrupt--">sendShutdownInterrupt</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupClusterConnection--">setupClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stop-java.lang.String-boolean-org.apache.hadoop.hbase.security.User-">stop</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#toString--">toString</a>, <a href="../../../../../org/apache/hadoop/h
 base/regionserver/HRegionServer.html#tryRegionServerReport-long-long-">tryRegionServerReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#unassign-byte:A-">unassign</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateConfiguration--">updateConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateRegionFavoredNodesMapping-java.lang.String-java.util.List-">updateRegionFavoredNodesMapping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitForServerOnline--">waitForServerOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRollRequestFinished--">walRollRequestFinished</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.util.HasThread">
@@ -1495,7 +1495,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/master/MasterServices
 <!--   -->
 </a>
 <h3>Methods inherited from interface&nbsp;org.apache.hadoop.hbase.<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.Abortable">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
index 7cec044..b1098db 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMasterCommandLine.LocalHMaster.html
@@ -198,7 +198,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" tit
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT">ABORT_TIMEOUT</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT_TASK">ABORT_TIMEOUT_TASK</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheConfig">cacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheFlusher">cacheFlusher</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterConnection">clusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterId">clusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterStatusTracker">clusterStatusTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#compactSplitThread">compactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/reg
 ionserver/HRegionServer.html#conf">conf</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#configurationManager">configurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#csm">csm</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executorService">executorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fs">fs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsOk">fsOk</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsUtilizationChore">fsUtilizationChore</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#hMemManager">hMemManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#infoServer">infoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#le
 ases">leases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#lock">lock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#MASTER_HOSTNAME_KEY">MASTER_HOSTNAME_KEY</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#metaTableLocator">metaTableLocator</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegions">movedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#msgInterval">msgInterval</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#numRegionsToReport">numRegionsToReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onlineRegions">onlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionFavoredNodesMap">regionFavoredNodesMap</a>, <a href="../../../../../org/apache/hadoop/hbase
 /regionserver/HRegionServer.html#REGIONSERVER">REGIONSERVER</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionsInTransitionInRS">regionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSinkHandler">replicationSinkHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSourceHandler">replicationSourceHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcServices">rpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#secureBulkLoadManager">secureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#serverName">serverName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sleeper">sleeper</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#
 startcode">startcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tableDescriptors">tableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#TEST_SKIP_REPORTING_TRANSITION">TEST_SKIP_REPORTING_TRANSITION</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#threadWakeFrequency">threadWakeFrequency</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#useThisHostnameInstead">useThisHostnameInstead</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFactory">walFactory</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFs">walFs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRoller">walRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#zooKeeper">zooKeeper</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT">ABORT_TIMEOUT</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#ABORT_TIMEOUT_TASK">ABORT_TIMEOUT_TASK</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheConfig">cacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cacheFlusher">cacheFlusher</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterConnection">clusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterId">clusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clusterStatusTracker">clusterStatusTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#compactSplitThread">compactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/reg
 ionserver/HRegionServer.html#conf">conf</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#configurationManager">configurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#csm">csm</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executorService">executorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fs">fs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsOk">fsOk</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#fsUtilizationChore">fsUtilizationChore</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#hMemManager">hMemManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#infoServer">infoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#le
 ases">leases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#lock">lock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#MASTER_HOSTNAME_KEY">MASTER_HOSTNAME_KEY</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegions">movedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#msgInterval">msgInterval</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#numRegionsToReport">numRegionsToReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onlineRegions">onlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionFavoredNodesMap">regionFavoredNodesMap</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#REGIONSERVER">REGIONSERVER</a>, <a href="../../../../../org/apache/hadoop/hbase/regions
 erver/HRegionServer.html#regionsInTransitionInRS">regionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSinkHandler">replicationSinkHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#replicationSourceHandler">replicationSourceHandler</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#rpcServices">rpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#secureBulkLoadManager">secureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#serverName">serverName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sleeper">sleeper</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#startcode">startcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tableDescripto
 rs">tableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#TEST_SKIP_REPORTING_TRANSITION">TEST_SKIP_REPORTING_TRANSITION</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#threadWakeFrequency">threadWakeFrequency</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#useThisHostnameInstead">useThisHostnameInstead</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFactory">walFactory</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walFs">walFs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRoller">walRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#zooKeeper">zooKeeper</a></code></li>
 </ul>
 </li>
 </ul>
@@ -254,7 +254,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" tit
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#abort-java.lang.String-">abort</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addRegion-org.apache.hadoop.hbase.regionserver.HRegion-">addRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addToMovedRegions-java.lang.String-org.apache.hadoop.hbase.ServerName-long-">addToMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#checkFileSystem--">checkFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cleanMovedRegions--">cleanMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clearRegionBlockCache-org.apache.hadoop.hbase.regionserver.Region-">clearRegionBlockCache</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAllRegions-boolean-">closeAllRegions</a
 >, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAndOfflineRegionForSplitOrMerge-java.util.List-">closeAndOfflineRegionForSplitOrMerge</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeRegion-java.lang.String-boolean-org.apache.hadoop.hbase.ServerName-">closeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#constructRegionServer-java.lang.Class-org.apache.hadoop.conf.Configuration-">constructRegionServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#convertThrowableToIOE-java.lang.Throwable-java.lang.String-">convertThrowableToIOE</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionLoad-java.lang.String-">createRegionLoad</a>,
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub--">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub-boolean-">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#execRegionServerService-org.apache.hbase.thirdparty.com.google.protobuf.RpcController-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest-">execRegionServerService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executeProcedure-long-org.apache.hadoop.hbase.procedure2.RSProcedureCallable-">executeProcedure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCacheConfig--">getCacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getChoreService--">getChoreServi
 ce</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterId--">getClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionPressure--">getCompactionPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionRequestor--">getCompactionRequestor</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactSplitThread--">getCompactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfigurationManager--">getConfigurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConnec
 tion--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getEventLoopGroupConfig--">getEventLoopGroupConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getExecutorService--">getExecutorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFavoredNodesForRegion-java.lang.String-">getFavoredNodesForRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushPressure--">getFlushPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushRequester--">getFlushRequester</a>, <a href="../../../../../org/apache/hadoop/hbase/re
 gionserver/HRegionServer.html#getFlushThroughputController--">getFlushThroughputController</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFsTableDescriptors--">getFsTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getHeapMemoryManager--">getHeapMemoryManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getInfoServer--">getInfoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLastSequenceId-byte:A-">getLastSequenceId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLeases--">getLeases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMasterAddressTracker--">getMasterAddressTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetaTableLocator--">getMetaTableLocator</a>, <a href="../../../../
 ../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetrics--">getMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMostLoadedRegions--">getMostLoadedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNonceManager--">getNonceManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNumberOfOnlineRegions--">getNumberOfOnlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegion-byte:A-">getOnlineRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegionsLocalContext--">getOnlineRegionsLocalContext</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineTables--">getOnlineTables</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-byte:A-">getRegion</a>, <a href="../..
 /../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-java.lang.String-">getRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionBlockLocations-java.lang.String-">getRegionBlockLocations</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-byte:A-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions--">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions-org.apache.hadoop.hbase.TableName-">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerAccounting--">getRegionServerAccounting</a>, <a href="../../../../../org/apache/
 hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessorHost--">getRegionServerCoprocessorHost</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessors--">getRegionServerCoprocessors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerMetrics--">getRegionServerMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerRpcQuotaManager--">getRegionServerRpcQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerSpaceQuotaManager--">getRegionServerSpaceQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionsInTransitionInRS--">getRegionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSinkService--">getReplicationSinkService</a>, <a href="../../../
 ../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSourceService--">getReplicationSourceService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRootDir--">getRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRpcServer--">getRpcServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRSRpcServices--">getRSRpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getSecureBulkLoadManager--">getSecureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getStartcode--">getStartcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getTableDescriptors--">getTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getThreadWakeFrequency--">getThreadWakeFrequency</a>, <a href="..
 /../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWAL-org.apache.hadoop.hbase.client.RegionInfo-">getWAL</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALFileSystem--">getWALFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalGroupsReplicationStatus--">getWalGroupsReplicationStatus</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalRoller--">getWalRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALRootDir--">getWALRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALs--">getWALs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#handleReportForDutyResponse-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse-">handleReportForDutyResponse</a>, <a href="../../
 ../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeMemStoreChunkCreator--">initializeMemStoreChunkCreator</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isAborted--">isAborted</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isClusterUp--">isClusterUp</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isOnline--">isOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isShutDown--">isShutDown</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopped--">isStopped</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopping--">isStopping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#kill--">kill</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegionCleanerPeriod--">mo
 vedRegionCleanerPeriod</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onConfigurationChange-org.apache.hadoop.conf.Configuration-">onConfigurationChange</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">postOpenDeployTasks</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionLock-java.util.List-java.lang.String-org.apache.hadoop.hbase.Abortable-">regionLock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#remoteProcedureComplete-long-java.lang.Throwable-">remoteProcedureComplete</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeRegion-org.apache.hadoop.hbase.regionserver.HRegion-org.apache.hadoop.hbase.ServerName-">removeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegi
 onServer.html#reportFileArchivalForQuotas-org.apache.hadoop.hbase.TableName-java.util.Collection-">reportFileArchivalForQuotas</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionSizesForQuotas-org.apache.hadoop.hbase.quotas.RegionSizeStore-">reportRegionSizesForQuotas</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionStateTransition-org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext-">reportRegionStateTransition</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sendShutdownInterrupt--">sendShutdownInterrupt</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupClusterConnection--">setupClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stop-java.lang.String-boolean-org.apache.hadoop.hbase.security.User-">stop</a>, <a href="../../../../
 ../org/apache/hadoop/hbase/regionserver/HRegionServer.html#toString--">toString</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#tryRegionServerReport-long-long-">tryRegionServerReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#unassign-byte:A-">unassign</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateConfiguration--">updateConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateRegionFavoredNodesMapping-java.lang.String-java.util.List-">updateRegionFavoredNodesMapping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitForServerOnline--">waitForServerOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRollRequestFinished--">walRollRequestFinished</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#abort-java.lang.String-">abort</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addRegion-org.apache.hadoop.hbase.regionserver.HRegion-">addRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#addToMovedRegions-java.lang.String-org.apache.hadoop.hbase.ServerName-long-">addToMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#checkFileSystem--">checkFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#cleanMovedRegions--">cleanMovedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#clearRegionBlockCache-org.apache.hadoop.hbase.regionserver.Region-">clearRegionBlockCache</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAllRegions-boolean-">closeAllRegions</a
 >, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeAndOfflineRegionForSplitOrMerge-java.util.List-">closeAndOfflineRegionForSplitOrMerge</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#closeRegion-java.lang.String-boolean-org.apache.hadoop.hbase.ServerName-">closeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#constructRegionServer-java.lang.Class-org.apache.hadoop.conf.Configuration-">constructRegionServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#convertThrowableToIOE-java.lang.Throwable-java.lang.String-">convertThrowableToIOE</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionLoad-java.lang.String-">createRegionLoad</a>,
  <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub--">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#createRegionServerStatusStub-boolean-">createRegionServerStatusStub</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#execRegionServerService-org.apache.hbase.thirdparty.com.google.protobuf.RpcController-org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest-">execRegionServerService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#executeProcedure-long-org.apache.hadoop.hbase.procedure2.RSProcedureCallable-">executeProcedure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCacheConfig--">getCacheConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getChoreService--">getChoreServi
 ce</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getClusterId--">getClusterId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionPressure--">getCompactionPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactionRequestor--">getCompactionRequestor</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactSplitThread--">getCompactSplitThread</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConfigurationManager--">getConfigurationManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getConnec
 tion--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getEventLoopGroupConfig--">getEventLoopGroupConfig</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getExecutorService--">getExecutorService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFavoredNodesForRegion-java.lang.String-">getFavoredNodesForRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushPressure--">getFlushPressure</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFlushRequester--">getFlushRequester</a>, <a href="../../../../../org/apache/hadoop/hbase/re
 gionserver/HRegionServer.html#getFlushThroughputController--">getFlushThroughputController</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getFsTableDescriptors--">getFsTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getHeapMemoryManager--">getHeapMemoryManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getInfoServer--">getInfoServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLastSequenceId-byte:A-">getLastSequenceId</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getLeases--">getLeases</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMasterAddressTracker--">getMasterAddressTracker</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getMetrics--">getMetrics</a>, <a href="../../../../../org/apache/hado
 op/hbase/regionserver/HRegionServer.html#getMostLoadedRegions--">getMostLoadedRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNonceManager--">getNonceManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getNumberOfOnlineRegions--">getNumberOfOnlineRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegion-byte:A-">getOnlineRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineRegionsLocalContext--">getOnlineRegionsLocalContext</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getOnlineTables--">getOnlineTables</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-byte:A-">getRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegion-java.lang.String-">getRegion</a>, <a href="../../../
 ../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionBlockLocations-java.lang.String-">getRegionBlockLocations</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-byte:A-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionByEncodedName-java.lang.String-">getRegionByEncodedName</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions--">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegions-org.apache.hadoop.hbase.TableName-">getRegions</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerAccounting--">getRegionServerAccounting</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessorHost--">getRegionServerCoprocessorHost</a>, <a href="../.
 ./../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerCoprocessors--">getRegionServerCoprocessors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerMetrics--">getRegionServerMetrics</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerRpcQuotaManager--">getRegionServerRpcQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionServerSpaceQuotaManager--">getRegionServerSpaceQuotaManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRegionsInTransitionInRS--">getRegionsInTransitionInRS</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSinkService--">getReplicationSinkService</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getReplicationSourceService--">getReplicationSourceService</a>, <a
  href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRootDir--">getRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRpcServer--">getRpcServer</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRSRpcServices--">getRSRpcServices</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getSecureBulkLoadManager--">getSecureBulkLoadManager</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getStartcode--">getStartcode</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getTableDescriptors--">getTableDescriptors</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getThreadWakeFrequency--">getThreadWakeFrequency</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWAL-org.apache.hadoop.hbase.client.RegionInfo-">getWAL</
 a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALFileSystem--">getWALFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalGroupsReplicationStatus--">getWalGroupsReplicationStatus</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWalRoller--">getWalRoller</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALRootDir--">getWALRootDir</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#getWALs--">getWALs</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#handleReportForDutyResponse-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse-">handleReportForDutyResponse</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#initializeMemStoreChunkCreator--">initializeMemStoreChunkCreato
 r</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isAborted--">isAborted</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isClusterUp--">isClusterUp</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isOnline--">isOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isShutDown--">isShutDown</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopped--">isStopped</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#isStopping--">isStopping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#kill--">kill</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#movedRegionCleanerPeriod--">movedRegionCleanerPeriod</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#onConfigurationChang
 e-org.apache.hadoop.conf.Configuration-">onConfigurationChange</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">postOpenDeployTasks</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#regionLock-java.util.List-java.lang.String-org.apache.hadoop.hbase.Abortable-">regionLock</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#remoteProcedureComplete-long-java.lang.Throwable-">remoteProcedureComplete</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeRegion-org.apache.hadoop.hbase.regionserver.HRegion-org.apache.hadoop.hbase.ServerName-">removeRegion</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportFileArchivalForQuotas-org.apache.hadoop.hbase.TableName-java.util.Collection-">reportFileArchivalForQuotas</
 a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionSizesForQuotas-org.apache.hadoop.hbase.quotas.RegionSizeStore-">reportRegionSizesForQuotas</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#reportRegionStateTransition-org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext-">reportRegionStateTransition</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#sendShutdownInterrupt--">sendShutdownInterrupt</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#setupClusterConnection--">setupClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#stop-java.lang.String-boolean-org.apache.hadoop.hbase.security.User-">stop</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#toString--">toString</a>, <a href="../../../../../org/apache/hadoop/h
 base/regionserver/HRegionServer.html#tryRegionServerReport-long-long-">tryRegionServerReport</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#unassign-byte:A-">unassign</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateConfiguration--">updateConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#updateRegionFavoredNodesMapping-java.lang.String-java.util.List-">updateRegionFavoredNodesMapping</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#waitForServerOnline--">waitForServerOnline</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#walRollRequestFinished--">walRollRequestFinished</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.util.HasThread">
@@ -282,7 +282,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/master/HMaster.html" tit
 <!--   -->
 </a>
 <h3>Methods inherited from interface&nbsp;org.apache.hadoop.hbase.<a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></h3>
-<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">getMetaTableLocator</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
+<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#createConnection-org.apache.hadoop.conf.Configuration-">createConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getChoreService--">getChoreService</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">getClusterConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">getConfiguration</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">getConnection</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getCoordinatedStateManager--">getCoordinatedStateManager</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#getFileSystem--">getFileSystem</a>, <a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.hbase.Abortable">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html b/devapidocs/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
index 4e6c69c..a66f953 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
@@ -273,7 +273,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>unassignExcessMetaReplica</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html#line.93">unassignExcessMetaReplica</a>(int&nbsp;numMetaReplicasConfigured)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html#line.92">unassignExcessMetaReplica</a>(int&nbsp;numMetaReplicasConfigured)</pre>
 </li>
 </ul>
 </li>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index ea05301..26a93dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -269,3590 +269,3574 @@
 <span class="sourceLineNo">261</span>   */<a name="line.261"></a>
 <span class="sourceLineNo">262</span>  protected ClusterConnection clusterConnection;<a name="line.262"></a>
 <span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /*<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Long-living meta table locator, which is created when the server is started and stopped<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * when server shuts down. References to this locator shall be used to perform according<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * operations in EventHandlers. Primary reason for this decision is to make it mockable<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * for tests.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  protected MetaTableLocator metaTableLocator;<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * Go here to get table descriptors.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  protected TableDescriptors tableDescriptors;<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>  // Replication services. If no replication, this handler will be null.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  // Compactions<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public CompactSplit compactSplitThread;<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Map of regions currently being served by this region server. Key is the<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * encoded region name.  All access should be synchronized.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /**<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * and here we really mean DataNode locations.<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  // Leases<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  protected Leases leases;<a name="line.303"></a>
+<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * Go here to get table descriptors.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  protected TableDescriptors tableDescriptors;<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>  // Replication services. If no replication, this handler will be null.<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  // Compactions<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public CompactSplit compactSplitThread;<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * Map of regions currently being served by this region server. Key is the<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * encoded region name.  All access should be synchronized.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.280"></a>
+<span class="sourceLineNo">281</span><a name="line.281"></a>
+<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
+<span class="sourceLineNo">283</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * and here we really mean DataNode locations.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   */<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  // Leases<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  protected Leases leases;<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>  // Instance of the hbase executor executorService.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  protected ExecutorService executorService;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  // If false, the file system has become unavailable<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  protected volatile boolean fsOk;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  protected HFileSystem fs;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected HFileSystem walFs;<a name="line.303"></a>
 <span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>  // Instance of the hbase executor executorService.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  protected ExecutorService executorService;<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  // If false, the file system has become unavailable<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  protected volatile boolean fsOk;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  protected HFileSystem fs;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  protected HFileSystem walFs;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  // Set when a report to the master comes back with a message asking us to<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  // of HRegionServer in isolation.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private volatile boolean stopped = false;<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // debugging and unit tests.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private volatile boolean abortRequested;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  // Default abort timeout is 1200 seconds for safe<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  // Will run this task when abort timeout<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.325"></a>
+<span class="sourceLineNo">305</span>  // Set when a report to the master comes back with a message asking us to<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  // of HRegionServer in isolation.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private volatile boolean stopped = false;<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  // debugging and unit tests.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private volatile boolean abortRequested;<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  // Default abort timeout is 1200 seconds for safe<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  // Will run this task when abort timeout<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  // space regions.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  private boolean stopping = false;<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  volatile boolean killed = false;<a name="line.325"></a>
 <span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.327"></a>
+<span class="sourceLineNo">327</span>  private volatile boolean shutDown = false;<a name="line.327"></a>
 <span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  // space regions.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private boolean stopping = false;<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  volatile boolean killed = false;<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  private volatile boolean shutDown = false;<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>  protected final Configuration conf;<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Path rootDir;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Path walRootDir;<a name="line.340"></a>
+<span class="sourceLineNo">329</span>  protected final Configuration conf;<a name="line.329"></a>
+<span class="sourceLineNo">330</span><a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private Path rootDir;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private Path walRootDir;<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  final int numRetries;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  protected final int threadWakeFrequency;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  protected final int msgInterval;<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  protected final int numRegionsToReport;<a name="line.340"></a>
 <span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  final int numRetries;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  protected final int threadWakeFrequency;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected final int msgInterval;<a name="line.346"></a>
+<span class="sourceLineNo">342</span>  // Stub to do region server status calls against the master.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  RpcClient rpcClient;<a name="line.346"></a>
 <span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  protected final int numRegionsToReport;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  // Stub to do region server status calls against the master.<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  RpcClient rpcClient;<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.357"></a>
+<span class="sourceLineNo">348</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  // into web context.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  protected InfoServer infoServer;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  private JvmPauseMonitor pauseMonitor;<a name="line.357"></a>
 <span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  // into web context.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  protected InfoServer infoServer;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  private JvmPauseMonitor pauseMonitor;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** region server process name */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String REGIONSERVER = "regionserver";<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  MetricsRegionServer metricsRegionServer;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  MetricsTable metricsTable;<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private SpanReceiverHost spanReceiverHost;<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private ChoreService choreService;<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /*<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Check for compactions requests.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  ScheduledChore compactionChecker;<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /*<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * Check for flushes<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  ScheduledChore periodicFlusher;<a name="line.387"></a>
-<span class="sourceLineNo">388</span><a name="line.388"></a>
-<span class="sourceLineNo">389</span>  protected volatile WALFactory walFactory;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // WAL roller. log is protected rather than private to avoid<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // eclipse warning when accessed by inner classes<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected LogRoller walRoller;<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // A thread which calls reportProcedureDone<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // flag set after we're done setting up server threads<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // zookeeper connection and watcher<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  protected final ZKWatcher zooKeeper;<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // master address tracker<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // Cluster Status Tracker<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  // Log Splitting Worker<a name="line.410"></a>
-<span class="sourceLineNo">411</span>  private SplitLogWorker splitLogWorker;<a name="line.411"></a>
+<span class="sourceLineNo">359</span>  /** region server process name */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public static final String REGIONSERVER = "regionserver";<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  MetricsRegionServer metricsRegionServer;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  MetricsTable metricsTable;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>  private SpanReceiverHost spanReceiverHost;<a name="line.364"></a>
+<span class="sourceLineNo">365</span><a name="line.365"></a>
+<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private ChoreService choreService;<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /*<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Check for compactions requests.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  ScheduledChore compactionChecker;<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  /*<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * Check for flushes<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  ScheduledChore periodicFlusher;<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  protected volatile WALFactory walFactory;<a name="line.381"></a>
+<span class="sourceLineNo">382</span><a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // WAL roller. log is protected rather than private to avoid<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // eclipse warning when accessed by inner classes<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected LogRoller walRoller;<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  // A thread which calls reportProcedureDone<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.388"></a>
+<span class="sourceLineNo">389</span><a name="line.389"></a>
+<span class="sourceLineNo">390</span>  // flag set after we're done setting up server threads<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // zookeeper connection and watcher<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  protected final ZKWatcher zooKeeper;<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // master address tracker<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span>  // Cluster Status Tracker<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // Log Splitting Worker<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  private SplitLogWorker splitLogWorker;<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // A sleeper that sleeps for msgInterval.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  protected final Sleeper sleeper;<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private final int operationTimeout;<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  private final int shortOperationTimeout;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.411"></a>
 <span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>  // A sleeper that sleeps for msgInterval.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  protected final Sleeper sleeper;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  private final int operationTimeout;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  private final int shortOperationTimeout;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.419"></a>
+<span class="sourceLineNo">413</span>  // Cache configuration and block cache reference<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  protected CacheConfig cacheConfig;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  // Cache configuration for mob<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  final MobCacheConfig mobCacheConfig;<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** The health check chore. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  private HealthCheckChore healthCheckChore;<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  // Cache configuration and block cache reference<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  protected CacheConfig cacheConfig;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  // Cache configuration for mob<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  final MobCacheConfig mobCacheConfig;<a name="line.424"></a>
+<span class="sourceLineNo">421</span>  /** The nonce manager chore. */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  private ScheduledChore nonceManagerChore;<a name="line.422"></a>
+<span class="sourceLineNo">423</span><a name="line.423"></a>
+<span class="sourceLineNo">424</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.424"></a>
 <span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>  /** The health check chore. */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  private HealthCheckChore healthCheckChore;<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>  /** The nonce manager chore. */<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  private ScheduledChore nonceManagerChore;<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * against  Master.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected ServerName serverName;<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /*<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * hostname specified by hostname config<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  protected String useThisHostnameInstead;<a name="line.444"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * against  Master.<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   */<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  protected ServerName serverName;<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /*<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * hostname specified by hostname config<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  protected String useThisHostnameInstead;<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  // key to the config parameter of server hostname<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  // both master and region server<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.441"></a>
+<span class="sourceLineNo">442</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.444"></a>
 <span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  // key to the config parameter of server hostname<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  // both master and region server<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  // Exception will be thrown if both are used.<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * This servers startcode.<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  protected final long startcode;<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * Unique identifier for the cluster we are a part of.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  protected String clusterId;<a name="line.467"></a>
+<span class="sourceLineNo">446</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  // Exception will be thrown if both are used.<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * This servers startcode.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  protected final long startcode;<a name="line.454"></a>
+<span class="sourceLineNo">455</span><a name="line.455"></a>
+<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
+<span class="sourceLineNo">457</span>   * Unique identifier for the cluster we are a part of.<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   */<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  protected String clusterId;<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  /**<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * Chore to clean periodically the moved region list<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>  // chore for refreshing store files for secondary regions<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  private StorefileRefresherChore storefileRefresher;<a name="line.467"></a>
 <span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Chore to clean periodically the moved region list<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  // chore for refreshing store files for secondary regions<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  private StorefileRefresherChore storefileRefresher;<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  private RegionServerCoprocessorHost rsHost;<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>  /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * HBASE-3787) are:<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   */<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  final ServerNonceManager nonceManager;<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>  private UserProvider userProvider;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected final RSRpcServices rpcServices;<a name="line.506"></a>
+<span class="sourceLineNo">469</span>  private RegionServerCoprocessorHost rsHost;<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /**<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   * HBASE-3787) are:<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  final ServerNonceManager nonceManager;<a name="line.494"></a>
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  private UserProvider userProvider;<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>  protected final RSRpcServices rpcServices;<a name="line.498"></a>
+<span class="sourceLineNo">499</span><a name="line.499"></a>
+<span class="sourceLineNo">500</span>  protected CoordinatedStateManager csm;<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>  /**<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  protected final ConfigurationManager configurationManager;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  protected CoordinatedStateManager csm;<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   */<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  protected final ConfigurationManager configurationManager;<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.517"></a>
+<span class="sourceLineNo">508</span>  @VisibleForTesting<a name="line.508"></a>
+<span class="sourceLineNo">509</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.509"></a>
+<span class="sourceLineNo">510</span><a name="line.510"></a>
+<span class="sourceLineNo">511</span>  private volatile ThroughputController flushThroughputController;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.513"></a>
+<span class="sourceLineNo">514</span><a name="line.514"></a>
+<span class="sourceLineNo">515</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.517"></a>
 <span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  private volatile ThroughputController flushThroughputController;<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   */<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private final boolean masterless;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  /**<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * Starts a HRegionServer at the default location<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  // Don't start any services or managers in here in the Constructor.<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    super("RegionServer");  // thread name<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TraceUtil.initTracer(conf);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    try {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      this.startcode = System.currentTimeMillis();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      this.conf = conf;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.fsOk = true;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      HFile.checkHFileVersion(this.conf);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      checkCodecs(this.conf);<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.554"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  private final boolean masterless;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>   * Starts a HRegionServer at the default location<a name="line.529"></a>
+<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  // Don't start any services or managers in here in the Constructor.<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    super("RegionServer");  // thread name<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    TraceUtil.initTracer(conf);<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    try {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      this.startcode = System.currentTimeMillis();<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      this.conf = conf;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      this.fsOk = true;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      HFile.checkHFileVersion(this.conf);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      checkCodecs(this.conf);<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.546"></a>
+<span class="sourceLineNo">547</span><a name="line.547"></a>
+<span class="sourceLineNo">548</span>      // Disable usage of meta replicas in the regionserver<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      // Config'ed params<a name="line.550"></a>
+<span class="sourceLineNo">551</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.553"></a>
+<span class="sourceLineNo">554</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.554"></a>
 <span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>      // Disable usage of meta replicas in the regionserver<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      // Config'ed params<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.564"></a>
+<span class="sourceLineNo">556</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.564"></a>
 <span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.567"></a>
+<span class="sourceLineNo">566</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.566"></a>
+<span class="sourceLineNo">567</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.567"></a>
 <span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.571"></a>
-<span class="sourceLineNo">572</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.575"></a>
-<span class="sourceLineNo">576</span><a name="line.576"></a>
-<span class="sourceLineNo">577</span>      this.abortRequested = false;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      this.stopped = false;<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      rpcServices = createRpcServices();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String hostName =<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.583"></a>
-<span class="sourceLineNo">584</span>              : this.useThisHostnameInstead;<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // login the zookeeper client principal (if using security)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      // login the server principal (if using secure Hadoop)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      login(userProvider, hostName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
-<span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
-<span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
-<span class="sourceLineNo">608</span><a name="line.608"></a>
-<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
-<span class="sourceLineNo">633</span><a name="line.633"></a>
-<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
-<span class="sourceLineNo">661</span><a name="line.661"></a>
-<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span><a name="line.733"></a>
-<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
-<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">569</span>      this.abortRequested = false;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      this.stopped = false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span><a name="line.571"></a>
+<span class="sourceLineNo">572</span>      rpcServices = createRpcServices();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      String hostName =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.575"></a>
+<span class="sourceLineNo">576</span>              : this.useThisHostnameInstead;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.579"></a>
+<span class="sourceLineNo">580</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>      // login the zookeeper client principal (if using security)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.583"></a>
+<span class="sourceLineNo">584</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>      // login the server principal (if using secure Hadoop)<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      login(userProvider, hostName);<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      // init superusers and add the server principal (if using security)<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      // or process owner as default super user.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      Superusers.initialize(conf);<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.590"></a>
+<span class="sourceLineNo">591</span><a name="line.591"></a>
+<span class="sourceLineNo">592</span>      boolean isMasterNotCarryTable =<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      // no need to instantiate global block cache when master not carry table<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      if (!isMasterNotCarryTable) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      cacheConfig = new CacheConfig(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.599"></a>
+<span class="sourceLineNo">600</span><a name="line.600"></a>
+<span class="sourceLineNo">601</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        @Override<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      };<a name="line.606"></a>
+<span class="sourceLineNo">607</span><a name="line.607"></a>
+<span class="sourceLineNo">608</span>      initializeFileSystem();<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.609"></a>
+<span class="sourceLineNo">610</span><a name="line.610"></a>
+<span class="sourceLineNo">611</span>      this.configurationManager = new ConfigurationManager();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.612"></a>
+<span class="sourceLineNo">613</span><a name="line.613"></a>
+<span class="sourceLineNo">614</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        // Open connection to zookeeper and set primary watcher<a name="line.616"></a>
+<span class="sourceLineNo">617</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        if (!this.masterless) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          masterAddressTracker.start();<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          clusterStatusTracker.start();<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        } else {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          masterAddressTracker = null;<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          clusterStatusTracker = null;<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        }<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        zooKeeper = null;<a name="line.633"></a>
+<span class="sourceLineNo">634</span>        masterAddressTracker = null;<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        clusterStatusTracker = null;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      this.rpcServices.start(zooKeeper);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      // class HRS. TODO.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      this.choreService = new ChoreService(getName(), true);<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      this.executorService = new ExecutorService(getName());<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      putUpWebUI();<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    } catch (Throwable t) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // cause of failed startup is lost.<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      LOG.error("Failed construction RegionServer", t);<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      throw t;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
+<span class="sourceLineNo">653</span><a name="line.653"></a>
+<span class="sourceLineNo">654</span>  // HMaster should override this method to load the specific config for master<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.659"></a>
+<span class="sourceLineNo">660</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.660"></a>
+<span class="sourceLineNo">661</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.661"></a>
+<span class="sourceLineNo">662</span>        throw new IOException(msg);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>      } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>        return rpcServices.isa.getHostName();<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      return hostname;<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
+<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
+<span class="sourceLineNo">670</span><a name="line.670"></a>
+<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>   * If running on Windows, do windows-specific setup.<a name="line.672"></a>
+<span class="sourceLineNo">673</span>   */<a name="line.673"></a>
+<span class="sourceLineNo">674</span>  private s

<TRUNCATED>
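
For readers skimming the HRegionServer constructor excerpt above, the getUseThisHostnameInstead() logic reduces to a small precedence rule: an operator-supplied hostname and the "disable master reverse DNS" switch are mutually exclusive, and when the switch is on the server reports its RPC-bound hostname instead. A minimal, self-contained Java sketch of that rule follows; it is illustrative only (a plain Map stands in for the Hadoop Configuration, and the literal key strings are assumptions inferred from the constant names RS_HOSTNAME_KEY and RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY in the quoted source), not part of the published HBase code.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Sketch of the hostname-override precedence shown in the quoted constructor code.
public class HostnameOverrideSketch {

  // Assumed literal values; the quoted source only shows the constant names.
  static final String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";
  static final String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =
      "hbase.regionserver.hostname.disable.master.reversedns";

  // Returns the hostname the region server should report, or null to fall back
  // on default resolution. Setting both keys is rejected, mirroring the
  // mutual-exclusion check in the excerpt above.
  static String useThisHostnameInstead(Map<String, String> conf,
      String rpcBoundHostname) throws IOException {
    String hostname = conf.get(RS_HOSTNAME_KEY);
    boolean disableReverseDns = Boolean.parseBoolean(
        conf.getOrDefault(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, "false"));
    if (disableReverseDns) {
      if (hostname != null && !hostname.trim().isEmpty()) {
        throw new IOException(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and "
            + RS_HOSTNAME_KEY + " are mutually exclusive");
      }
      // Master reverse DNS disabled: report the locally bound RPC hostname.
      return rpcBoundHostname;
    }
    return hostname; // may be null, meaning "use the default"
  }

  public static void main(String[] args) throws IOException {
    Map<String, String> conf = new HashMap<>();
    conf.put(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, "true");
    // Prints "rs-1.example.com" (a hypothetical host) since the switch is on.
    System.out.println(useThisHostnameInstead(conf, "rs-1.example.com"));
  }
}

In the real server the returned value (when non-blank) is what feeds ServerName.valueOf(...) in the excerpt above; a blank result means the RPC service's own hostname is used.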

[12/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()),

<TRUNCATED>

[30/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
index 2cdee19..e6bc675 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterRpcServices.html
@@ -110,2406 +110,2407 @@
 <span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.util.ForeignExceptionUtil;<a name="line.102"></a>
 <span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.103"></a>
 <span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.zookeeper.KeeperException;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.slf4j.Logger;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.slf4j.LoggerFactory;<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;<a name="line.181"></a>
-<span class="sourceLineNo">182</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;<a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest;<a name="line.195"></a>
-<span class="sourceLineNo">196</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;<a name="line.197"></a>
-<span class="sourceLineNo">198</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;<a name="line.198"></a>
-<span class="sourceLineNo">199</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;<a name="line.201"></a>
-<span class="sourceLineNo">202</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;<a name="line.203"></a>
-<span class="sourceLineNo">204</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;<a name="line.204"></a>
-<span class="sourceLineNo">205</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersRequest;<a name="line.205"></a>
-<span class="sourceLineNo">206</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersResponse;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;<a name="line.207"></a>
-<span class="sourceLineNo">208</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;<a name="line.208"></a>
-<span class="sourceLineNo">209</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;<a name="line.210"></a>
-<span class="sourceLineNo">211</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;<a name="line.211"></a>
-<span class="sourceLineNo">212</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse;<a name="line.212"></a>
-<span class="sourceLineNo">213</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;<a name="line.213"></a>
-<span class="sourceLineNo">214</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;<a name="line.216"></a>
-<span class="sourceLineNo">217</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;<a name="line.217"></a>
-<span class="sourceLineNo">218</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;<a name="line.224"></a>
-<span class="sourceLineNo">225</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse;<a name="line.226"></a>
-<span class="sourceLineNo">227</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;<a name="line.227"></a>
-<span class="sourceLineNo">228</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;<a name="line.229"></a>
-<span class="sourceLineNo">230</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerRequest;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerResponse;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;<a name="line.234"></a>
-<span class="sourceLineNo">235</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;<a name="line.237"></a>
-<span class="sourceLineNo">238</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;<a name="line.239"></a>
-<span class="sourceLineNo">240</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;<a name="line.240"></a>
-<span class="sourceLineNo">241</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;<a name="line.243"></a>
-<span class="sourceLineNo">244</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse;<a name="line.244"></a>
-<span class="sourceLineNo">245</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetTableStateInMetaRequest;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;<a name="line.261"></a>
-<span class="sourceLineNo">262</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;<a name="line.262"></a>
-<span class="sourceLineNo">263</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest;<a name="line.264"></a>
-<span class="sourceLineNo">265</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;<a name="line.268"></a>
-<span class="sourceLineNo">269</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;<a name="line.269"></a>
-<span class="sourceLineNo">270</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationRequest;<a name="line.271"></a>
-<span class="sourceLineNo">272</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationResponse;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;<a name="line.273"></a>
-<span class="sourceLineNo">274</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RemoteProcedureResult;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneRequest;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportProcedureDoneResponse;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse;<a name="line.287"></a>
-<span class="sourceLineNo">288</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;<a name="line.288"></a>
-<span class="sourceLineNo">289</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest;<a name="line.290"></a>
-<span class="sourceLineNo">291</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse;<a name="line.291"></a>
-<span class="sourceLineNo">292</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest;<a name="line.292"></a>
-<span class="sourceLineNo">293</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse;<a name="line.293"></a>
-<span class="sourceLineNo">294</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse;<a name="line.295"></a>
-<span class="sourceLineNo">296</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest;<a name="line.296"></a>
-<span class="sourceLineNo">297</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest;<a name="line.298"></a>
-<span class="sourceLineNo">299</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse;<a name="line.299"></a>
-<span class="sourceLineNo">300</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationState;<a name="line.302"></a>
-<span class="sourceLineNo">303</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateRequest;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TransitReplicationPeerSyncReplicationStateResponse;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest;<a name="line.305"></a>
-<span class="sourceLineNo">306</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse;<a name="line.306"></a>
-<span class="sourceLineNo">307</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>/**<a name="line.309"></a>
-<span class="sourceLineNo">310</span> * Implements the master RPC services.<a name="line.310"></a>
-<span class="sourceLineNo">311</span> */<a name="line.311"></a>
-<span class="sourceLineNo">312</span>@InterfaceAudience.Private<a name="line.312"></a>
-<span class="sourceLineNo">313</span>@SuppressWarnings("deprecation")<a name="line.313"></a>
-<span class="sourceLineNo">314</span>public class MasterRpcServices extends RSRpcServices<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      implements MasterService.BlockingInterface, RegionServerStatusService.BlockingInterface,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>        LockService.BlockingInterface, HbckService.BlockingInterface {<a name="line.316"></a>
-<span class="sourceLineNo">317</span>  private static final Logger LOG = LoggerFactory.getLogger(MasterRpcServices.class.getName());<a name="line.317"></a>
-<span class="sourceLineNo">318</span><a name="line.318"></a>
-<span class="sourceLineNo">319</span>  private final HMaster master;<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @return Subset of configuration to pass initializing regionservers: e.g.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * the filesystem to use and root directory to use.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   */<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  private RegionServerStartupResponse.Builder createConfigurationSubset() {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    RegionServerStartupResponse.Builder resp = addConfig(<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      RegionServerStartupResponse.newBuilder(), HConstants.HBASE_DIR);<a name="line.327"></a>
-<span class="sourceLineNo">328</span>    resp = addConfig(resp, "fs.defaultFS");<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    return addConfig(resp, "hbase.master.info.port");<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private RegionServerStartupResponse.Builder addConfig(<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      final RegionServerStartupResponse.Builder resp, final String key) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    NameStringPair.Builder entry = NameStringPair.newBuilder()<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      .setName(key)<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      .setValue(master.getConfiguration().get(key));<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    resp.addMapEntries(entry.build());<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    return resp;<a name="line.338"></a>
-<span class="sourceLineNo">339</span>  }<a name="line.339"></a>
-<span class="sourceLineNo">340</span><a name="line.340"></a>
-<span class="sourceLineNo">341</span>  public MasterRpcServices(HMaster m) throws IOException {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    super(m);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    master = m;<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  @Override<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  protected Class&lt;?&gt; getRpcSchedulerFactoryClass() {<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    Configuration conf = getConfiguration();<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    if (conf != null) {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      return conf.getClass(MASTER_RPC_SCHEDULER_FACTORY_CLASS, super.getRpcSchedulerFactoryClass());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    } else {<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      return super.getRpcSchedulerFactoryClass();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    }<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  @Override<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  protected RpcServerInterface createRpcServer(Server server, Configuration conf,<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      RpcSchedulerFactory rpcSchedulerFactory, InetSocketAddress bindAddress, String name)<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      throws IOException {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    // RpcServer at HM by default enable ByteBufferPool iff HM having user table region in it<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    boolean reservoirEnabled = conf.getBoolean(RESERVOIR_ENABLED_KEY,<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        LoadBalancer.isMasterCanHostUserRegions(conf));<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    try {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      return RpcServerFactory.createRpcServer(server, name, getServices(),<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          bindAddress, // use final bindAddress for this server.<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          conf, rpcSchedulerFactory.create(conf, this, server), reservoirEnabled);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    } catch (BindException be) {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      throw new IOException(be.getMessage() + ". To switch ports use the '"<a name="line.368"></a>
-<span class="sourceLineNo">369</span>          + HConstants.MASTER_PORT + "' configuration property.",<a name="line.369"></a>
-<span class="sourceLineNo">370</span>          be.getCause() != null ? be.getCause() : be);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    }<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  @Override<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  protected PriorityFunction createPriority() {<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    return new MasterAnnotationReadingPriorityFunction(this);<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  }<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /**<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Checks for the following pre-checks in order:<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   * &lt;ol&gt;<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   *   &lt;li&gt;Master is initialized&lt;/li&gt;<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   *   &lt;li&gt;Rpc caller has admin permissions&lt;/li&gt;<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * &lt;/ol&gt;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @param requestName name of rpc request. Used in reporting failures to provide context.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @throws ServiceException If any of the above listed pre-check fails.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  private void rpcPreCheck(String requestName) throws ServiceException {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    try {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      master.checkInitialized();<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      requirePermission(requestName, Permission.Action.ADMIN);<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    } catch (IOException ioe) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      throw new ServiceException(ioe);<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    }<a name="line.394"></a>
-<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>  enum BalanceSwitchMode {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    SYNC,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    ASYNC<a name="line.399"></a>
-<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>  /**<a name="line.402"></a>
-<span class="sourceLineNo">403</span>   * Assigns balancer switch according to BalanceSwitchMode<a name="line.403"></a>
-<span class="sourceLineNo">404</span>   * @param b new balancer switch<a name="line.404"></a>
-<span class="sourceLineNo">405</span>   * @param mode BalanceSwitchMode<a name="line.405"></a>
-<span class="sourceLineNo">406</span>   * @return old balancer switch<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   */<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  boolean switchBalancer(final boolean b, BalanceSwitchMode mode) throws IOException {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    boolean oldValue = master.loadBalancerTracker.isBalancerOn();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    boolean newValue = b;<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    try {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      if (master.cpHost != null) {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>        master.cpHost.preBalanceSwitch(newValue);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>      }<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      try {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>        if (mode == BalanceSwitchMode.SYNC) {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>          synchronized (master.getLoadBalancer()) {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>            master.loadBalancerTracker.setBalancerOn(newValue);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>          }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>        } else {<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          master.loadBalancerTracker.setBalancerOn(newValue);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        }<a name="line.422"></a>
-<span class="sourceLineNo">423</span>      } catch (KeeperException ke) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        throw new IOException(ke);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      }<a name="line.425"></a>
-<span class="sourceLineNo">426</span>      LOG.info(master.getClientIdAuditPrefix() + " set balanceSwitch=" + newValue);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      if (master.cpHost != null) {<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        master.cpHost.postBalanceSwitch(oldValue, newValue);<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      }<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      master.getLoadBalancer().updateBalancerStatus(newValue);<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    } catch (IOException ioe) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      LOG.warn("Error flipping balance switch", ioe);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    return oldValue;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>  boolean synchronousBalanceSwitch(final boolean b) throws IOException {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>    return switchBalancer(b, BalanceSwitchMode.SYNC);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  }<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /**<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * @return list of blocking services and their security info classes that this server supports<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  @Override<a name="line.444"></a>
-<span class="sourceLineNo">445</span>  protected List&lt;BlockingServiceAndInterface&gt; getServices() {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>    List&lt;BlockingServiceAndInterface&gt; bssi = new ArrayList&lt;&gt;(5);<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    bssi.add(new BlockingServiceAndInterface(<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      MasterService.newReflectiveBlockingService(this),<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      MasterService.BlockingInterface.class));<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    bssi.add(new BlockingServiceAndInterface(<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      RegionServerStatusService.newReflectiveBlockingService(this),<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      RegionServerStatusService.BlockingInterface.class));<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    bssi.add(new BlockingServiceAndInterface(LockService.newReflectiveBlockingService(this),<a name="line.453"></a>
-<span class="sourceLineNo">454</span>        LockService.BlockingInterface.class));<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    bssi.add(new BlockingServiceAndInterface(HbckService.newReflectiveBlockingService(this),<a name="line.455"></a>
-<span class="sourceLineNo">456</span>        HbckService.BlockingInterface.class));<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    bssi.addAll(super.getServices());<a name="line.457"></a>
-<span class="sourceLineNo">458</span>    return bssi;<a name="line.458"></a>
-<span class="sourceLineNo">459</span>  }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>  @Override<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  @QosPriority(priority = HConstants.ADMIN_QOS)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  public GetLastFlushedSequenceIdResponse getLastFlushedSequenceId(RpcController controller,<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      GetLastFlushedSequenceIdRequest request) throws ServiceException {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    try {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>      master.checkServiceStarted();<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    } catch (IOException ioe) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throw new ServiceException(ioe);<a name="line.468"></a>
-<span class="sourceLineNo">469</span>    }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    byte[] encodedRegionName = request.getRegionName().toByteArray();<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    RegionStoreSequenceIds ids = master.getServerManager()<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      .getLastFlushedSequenceId(encodedRegionName);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    return ResponseConverter.buildGetLastFlushedSequenceIdResponse(ids);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  }<a name="line.474"></a>
-<span class="sourceLineNo">475</span><a name="line.475"></a>
-<span class="sourceLineNo">476</span>  @Override<a name="line.476"></a>
-<span class="sourceLineNo">477</span>  public RegionServerReportResponse regionServerReport(RpcController controller,<a name="line.477"></a>
-<span class="sourceLineNo">478</span>      RegionServerReportRequest request) throws ServiceException {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    try {<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      master.checkServiceStarted();<a name="line.480"></a>
-<span class="sourceLineNo">481</span>      int versionNumber = 0;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      String version = "0.0.0";<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      VersionInfo versionInfo = VersionInfoUtil.getCurrentClientVersionInfo();<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      if (versionInfo != null) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        version = versionInfo.getVersion();<a name="line.485"></a>
-<span class="sourceLineNo">486</span>        versionNumber = VersionInfoUtil.getVersionNumber(versionInfo);<a name="line.486"></a>
-<span class="sourceLineNo">487</span>      }<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      ClusterStatusProtos.ServerLoad sl = request.getLoad();<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      ServerName serverName = ProtobufUtil.toServerName(request.getServer());<a name="line.489"></a>
-<span class="sourceLineNo">490</span>      ServerMetrics oldLoad = master.getServerManager().getLoad(serverName);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      ServerMetrics newLoad =<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        ServerMetricsBuilder.toServerMetrics(serverName, versionNumber, version, sl);<a name="line.492"></a>
-<span class="sourceLineNo">493</span>      master.getServerManager().regionServerReport(serverName, newLoad);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      master.getAssignmentManager().reportOnlineRegions(serverName,<a name="line.494"></a>
-<span class="sourceLineNo">495</span>        newLoad.getRegionMetrics().keySet());<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      if (sl != null &amp;&amp; master.metricsMaster != null) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>        // Up our metrics.<a name="line.497"></a>
-<span class="sourceLineNo">498</span>        master.metricsMaster.incrementRequests(<a name="line.498"></a>
-<span class="sourceLineNo">499</span>          sl.getTotalNumberOfRequests() - (oldLoad != null ? oldLoad.getRequestCount() : 0));<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      }<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    } catch (IOException ioe) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      throw new ServiceException(ioe);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    return RegionServerReportResponse.newBuilder().build();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>  }<a name="line.505"></a>
-<span class="sourceLineNo">506</span><a name="line.506"></a>
-<span class="sourceLineNo">507</span>  @Override<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  public RegionServerStartupResponse regionServerStartup(RpcController controller,<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      RegionServerStartupRequest request) throws ServiceException {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    // Register with server manager<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    try {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      master.checkServiceStarted();<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      int versionNumber = 0;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      String version = "0.0.0";<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      VersionInfo versionInfo = VersionInfoUtil.getCurrentClientVersionInfo();<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      if (versionInfo != null) {<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        version = versionInfo.getVersion();<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        versionNumber = VersionInfoUtil.getVersionNumber(versionInfo);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      InetAddress ia = master.getRemoteInetAddress(request.getPort(), request.getServerStartCode());<a name="line.520"></a>
-<span class="sourceLineNo">521</span>      // if regionserver passed hostname to use,<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      // then use it instead of doing a reverse DNS lookup<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      ServerName rs =<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        master.getServerManager().regionServerStartup(request, versionNumber, version, ia);<a name="line.524"></a>
-<span class="sourceLineNo">525</span><a name="line.525"></a>
-<span class="sourceLineNo">526</span>      // Send back some config info<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      RegionServerStartupResponse.Builder resp = createConfigurationSubset();<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      NameStringPair.Builder entry = NameStringPair.newBuilder()<a name="line.528"></a>
-<span class="sourceLineNo">529</span>        .setName(HConstants.KEY_FOR_HOSTNAME_SEEN_BY_MASTER).setValue(rs.getHostname());<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      resp.addMapEntries(entry.build());<a name="line.530"></a>
-<span class="sourceLineNo">531</span><a name="line.531"></a>
-<span class="sourceLineNo">532</span>      return resp.build();<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    } catch (IOException ioe) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>      throw new ServiceException(ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  }<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>  @Override<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  public ReportRSFatalErrorResponse reportRSFatalError(<a name="line.539"></a>
-<span class="sourceLineNo">540</span>      RpcController controller, ReportRSFatalErrorRequest request) throws ServiceException {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    String errorText = request.getErrorMessage();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    ServerName sn = ProtobufUtil.toServerName(request.getServer());<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    String msg = sn + " reported a fatal error:\n" + errorText;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    LOG.warn(msg);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    master.rsFatals.add(msg);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    return ReportRSFatalErrorResponse.newBuilder().build();<a name="line.546"></a>
-<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>  @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>  public AddColumnResponse addColumn(RpcController controller,<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      AddColumnRequest req) throws ServiceException {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>    try {<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      long procId = master.addColumn(<a name="line.553"></a>
-<span class="sourceLineNo">554</span>          ProtobufUtil.toTableName(req.getTableName()),<a name="line.554"></a>
-<span class="sourceLineNo">555</span>          ProtobufUtil.toColumnFamilyDescriptor(req.getColumnFamilies()),<a name="line.555"></a>
-<span class="sourceLineNo">556</span>          req.getNonceGroup(),<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          req.getNonce());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      if (procId == -1) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        // This mean operation was not performed in server, so do not set any procId<a name="line.559"></a>
-<span class="sourceLineNo">560</span>        return AddColumnResponse.newBuilder().build();<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      } else {<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        return AddColumnResponse.newBuilder().setProcId(procId).build();<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      }<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    } catch (IOException ioe) {<a name="line.564"></a>
-<span class="sourceLineNo">565</span>      throw new ServiceException(ioe);<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    }<a name="line.566"></a>
-<span class="sourceLineNo">567</span>  }<a name="line.567"></a>
-<span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>  @Override<a name="line.569"></a>
-<span class="sourceLineNo">570</span>  public AssignRegionResponse assignRegion(RpcController controller,<a name="line.570"></a>
-<span class="sourceLineNo">571</span>      AssignRegionRequest req) throws ServiceException {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    try {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>      master.checkInitialized();<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>      final RegionSpecifierType type = req.getRegion().getType();<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      if (type != RegionSpecifierType.REGION_NAME) {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>        LOG.warn("assignRegion specifier type: expected: " + RegionSpecifierType.REGION_NAME<a name="line.577"></a>
-<span class="sourceLineNo">578</span>          + " actual: " + type);<a name="line.578"></a>
-<span class="sourceLineNo">579</span>      }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>      final byte[] regionName = req.getRegion().getValue().toByteArray();<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      final RegionInfo regionInfo = master.getAssignmentManager().getRegionInfo(regionName);<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      if (regionInfo == null) throw new UnknownRegionException(Bytes.toStringBinary(regionName));<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>      final AssignRegionResponse arr = AssignRegionResponse.newBuilder().build();<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      if (master.cpHost != null) {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>        master.cpHost.preAssign(regionInfo);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      }<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      LOG.info(master.getClientIdAuditPrefix() + " assign " + regionInfo.getRegionNameAsString());<a name="line.589"></a>
-<span class="sourceLineNo">590</span>      master.getAssignmentManager().assign(regionInfo);<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      if (master.cpHost != null) {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>        master.cpHost.postAssign(regionInfo);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      }<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      return arr;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    } catch (IOException ioe) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      throw new ServiceException(ioe);<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
-<span class="sourceLineNo">598</span>  }<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.zookeeper.KeeperException;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.slf4j.Logger;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.slf4j.LoggerFactory;<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.113"></a>
+<span class="sourceLineNo">114</span><a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersRequest;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ClearDeadServersResponse;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersRequest;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DecommissionRegionServersResponse;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;<a name="line.180"></a>
+<span class="sourceLineNo">181</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.HbckService;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest;<a name="line.190"></a>
+<span class="sourceLineNo">191</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse;<a name="line.191"></a>
+<span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse;<a name="line.197"></a>
+<span class="sourceLineNo">198</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;<a name="line.198"></a>
+<span class="sourceLineNo">199</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;<a name="line.201"></a>
+<span class="sourceLineNo">202</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;<a name="line.202"></a>
+<span class="sourceLineNo">203</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;<a name="line.203"></a>
+<span class="sourceLineNo">204</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest;<a name="line.204"></a>
+<span class="sourceLineNo">205</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse;<a name="line.205"></a>
+<span class="sourceLineNo">206</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersRequest;<a name="line.206"></a>
+<span class="sourceLineNo">207</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDecommissionedRegionServersResponse;<a name="line.207"></a>
+<span class="sourceLineNo">208</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;<a name="line.208"></a>
+<span class="sourceLineNo">209</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;<a name="line.209"></a>
+<span class="sourceLineNo">210</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;<a name="line.211"></a>
+<span class="sourceLineNo">212</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse;<a name="line.213"></a>
+<span class="sourceLineNo">214</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;<a name="line.214"></a>
+<span class="sourceLineNo">215</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;<a name="line.215"></a>
+<span class="sourceLineNo">216</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest;<a name="line.218"></a>
+<span class="sourceLineNo">219</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;<a name="line.223"></a>
+<span class="sourceLineNo">224</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;<a name="line.224"></a>
+<span class="sourceLineNo">225</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;<a name="line.226"></a>
+<span class="sourceLineNo">227</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;<a name="line.228"></a>
+<span class="sourceLineNo">229</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest;<a name="line.230"></a>
+<span class="sourceLineNo">231</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse;<a name="line.231"></a>
+<span class="sourceLineNo">232</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerRequest;<a name="line.232"></a>
+<span class="sourceLineNo">233</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RecommissionRegionServerResponse;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;<a name="line.235"></a>
+<span class="sourceLineNo">236</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest;<a name="line.238"></a>
+<span class="sourceLineNo">239</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse;<a name="line.239"></a>
+<span class="sourceLineNo">240</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;<a name="line.240"></a>
+<span class="sourceLineNo">241</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;<a name="line.241"></a>
+<span class="sourceLineNo">242</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse;<a name="line.243"></a>
+<span class="sourceLineNo">244</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest;<a name="line.244"></a>
+<span class="sourceLineNo">245</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse;<a name="line.251"></a>
+<span class="sourceLineNo">252</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetTableStateInMetaRequest;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;<a name="line.256"></a>
+<span class="sourceLineNo">257</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest;<a name="line.257"></a>
+<span class="sourceLineNo">258</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse;<a name="line.258"></a>
+<span class="sourceLineNo">259</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;<a name="line.259"></a>
+<span class="sourceLineNo">260</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse;<a name="line.264"></a>
+<span class="sourceLineNo">265</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest;<a name="line.265"></a>
+<span class="sourceLineNo">266</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest;<a name="line.269"></a>
+<span class="sourceLineNo">270</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes;<a name="line.271"></a>
+<span class="sourceLineNo">272</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationRequest;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.FileArchiveNotificationResponse;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;<a name="line.274"></a>
+<span class="sourceLineNo">275</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse;<a name="line.277"></a>
+<span class="sourceLineNo">278</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;<a name="line.279"></a>
+<span class="sourceLineNo">280</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse;<a name="line.283"></a>
+<span class="sourceLineNo">284</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RemoteProcedureResult;<a name="line.284"></a>
+<span class="sourceLineNo">285</span>import org.apache.hadoop.hbase.shaded.protobuf.genera

<TRUNCATED>

[38/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
index 422ad2f..97f206a 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":9,"i24":9,"i25":10,"i26":9,"i27":9,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":9,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":9,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":9,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":9,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":9,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":9,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":10,"i11
 0":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":9,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":9,"i22":10,"i23":9,"i24":9,"i25":10,"i26":9,"i27":9,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":9,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":9,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":9,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":9,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":9,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":9,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109":10,"i11
 0":10,"i111":10,"i112":10,"i113":10,"i114":9,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 <br>
 <pre>@InterfaceAudience.LimitedPrivate(value="Tools")
  @InterfaceStability.Evolving
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.216">HBaseFsck</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.215">HBaseFsck</a>
 extends org.apache.hadoop.conf.Configured
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a></pre>
 <div class="block">HBaseFsck (hbck) is a tool for checking and repairing region consistency and
@@ -880,33 +880,29 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getMaxOverlapsToSideline--">getMaxOverlapsToSideline</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i45" class="rowColor">
-<td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getMetaRegionServerName-int-">getMetaRegionServerName</a></span>(int&nbsp;replicaId)</code>&nbsp;</td>
-</tr>
-<tr id="i46" class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getOrCreateInfo-java.lang.String-">getOrCreateInfo</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</code>
 <div class="block">Gets the entry in regionInfo corresponding to the the given encoded
  region name.</div>
 </td>
 </tr>
-<tr id="i47" class="rowColor">
+<tr id="i46" class="altColor">
 <td class="colFirst"><code>org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getOverlapGroups-org.apache.hadoop.hbase.TableName-">getOverlapGroups</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</code>&nbsp;</td>
 </tr>
-<tr id="i48" class="altColor">
+<tr id="i47" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getRetCode--">getRetCode</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i49" class="rowColor">
+<tr id="i48" class="altColor">
 <td class="colFirst"><code>private org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getSidelineDir--">getSidelineDir</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i50" class="altColor">
+<tr id="i49" class="rowColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getTableDescriptors-java.util.List-">getTableDescriptors</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tableNames)</code>&nbsp;</td>
 </tr>
-<tr id="i51" class="rowColor">
+<tr id="i50" class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getTables-java.util.concurrent.atomic.AtomicInteger-">getTables</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&nbsp;numSkipped)</code>
 <div class="block">Return a list of user-space table names whose metadata have not been
@@ -916,434 +912,434 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
  milliseconds specified by timelag, then the table is a candidate to be returned.</div>
 </td>
 </tr>
-<tr id="i52" class="altColor">
+<tr id="i51" class="rowColor">
 <td class="colFirst"><code>static org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#getTmpDir-org.apache.hadoop.conf.Configuration-">getTmpDir</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
-<tr id="i53" class="rowColor">
+<tr id="i52" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#includeTable-org.apache.hadoop.hbase.TableName-">includeTable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</code>&nbsp;</td>
 </tr>
-<tr id="i54" class="altColor">
+<tr id="i53" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#isExclusive--">isExclusive</a></span>()</code>
 <div class="block">Only one instance of hbck can modify HBase at a time.</div>
 </td>
 </tr>
-<tr id="i55" class="rowColor">
+<tr id="i54" class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#isOptionsSupported-java.lang.String:A-">isOptionsSupported</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</code>&nbsp;</td>
 </tr>
-<tr id="i56" class="altColor">
+<tr id="i55" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#isTableDisabled-org.apache.hadoop.hbase.TableName-">isTableDisabled</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>
 <div class="block">Check if the specified region's table is disabled.</div>
 </td>
 </tr>
-<tr id="i57" class="rowColor">
+<tr id="i56" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#isTableIncluded-org.apache.hadoop.hbase.TableName-">isTableIncluded</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</code>
 <div class="block">Only check/fix tables specified by the list,
  Empty list means all tables are included.</div>
 </td>
 </tr>
-<tr id="i58" class="altColor">
+<tr id="i57" class="rowColor">
 <td class="colFirst"><code>static byte[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#keyOnly-byte:A-">keyOnly</a></span>(byte[]&nbsp;b)</code>&nbsp;</td>
 </tr>
-<tr id="i59" class="rowColor">
+<tr id="i58" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadDeployedRegions--">loadDeployedRegions</a></span>()</code>
 <div class="block">Get deployed regions according to the region servers.</div>
 </td>
 </tr>
-<tr id="i60" class="altColor">
+<tr id="i59" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadHdfsRegionDirs--">loadHdfsRegionDirs</a></span>()</code>
 <div class="block">Scan HDFS for all regions, recording their information into
  regionInfoMap</div>
 </td>
 </tr>
-<tr id="i61" class="rowColor">
+<tr id="i60" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadHdfsRegioninfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">loadHdfsRegioninfo</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)</code>
 <div class="block">Read the .regioninfo file from the file system.</div>
 </td>
 </tr>
-<tr id="i62" class="altColor">
+<tr id="i61" class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadHdfsRegionInfos--">loadHdfsRegionInfos</a></span>()</code>
 <div class="block">Populate hbi's from regionInfos loaded from file system.</div>
 </td>
 </tr>
-<tr id="i63" class="rowColor">
+<tr id="i62" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadMetaEntries--">loadMetaEntries</a></span>()</code>
 <div class="block">Scan hbase:meta, adding all regions found to the regionInfo map.</div>
 </td>
 </tr>
-<tr id="i64" class="altColor">
+<tr id="i63" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadTableInfosForTablesWithNoRegion--">loadTableInfosForTablesWithNoRegion</a></span>()</code>
 <div class="block">Loads table info's for tables that may not have been included, since there are no
  regions reported for the table, but table dir is there in hdfs</div>
 </td>
 </tr>
-<tr id="i65" class="rowColor">
+<tr id="i64" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#loadTableStates--">loadTableStates</a></span>()</code>
 <div class="block">Load the list of disabled tables in ZK into local set.</div>
 </td>
 </tr>
-<tr id="i66" class="altColor">
+<tr id="i65" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#logParallelMerge--">logParallelMerge</a></span>()</code>
 <div class="block">Log an appropriate message about whether or not overlapping merges are computed in parallel.</div>
 </td>
 </tr>
-<tr id="i67" class="rowColor">
+<tr id="i66" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#main-java.lang.String:A-">main</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)</code>
 <div class="block">Main program</div>
 </td>
 </tr>
-<tr id="i68" class="altColor">
+<tr id="i67" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#mergeRegionDirs-org.apache.hadoop.fs.Path-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">mergeRegionDirs</a></span>(org.apache.hadoop.fs.Path&nbsp;targetRegionDir,
                <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;contained)</code>
 <div class="block">Merge hdfs data by moving from contained HbckInfo into targetRegionDir.</div>
 </td>
 </tr>
-<tr id="i69" class="rowColor">
+<tr id="i68" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#offline-byte:A-">offline</a></span>(byte[]&nbsp;regionName)</code>
 <div class="block">This backwards-compatibility wrapper for permanently offlining a region
  that should not be alive.</div>
 </td>
 </tr>
-<tr id="i70" class="altColor">
+<tr id="i69" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#offlineHdfsIntegrityRepair--">offlineHdfsIntegrityRepair</a></span>()</code>
 <div class="block">This repair method analyzes hbase data in hdfs and repairs it to satisfy
  the table integrity rules.</div>
 </td>
 </tr>
-<tr id="i71" class="rowColor">
+<tr id="i70" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#offlineHLinkFileRepair--">offlineHLinkFileRepair</a></span>()</code>
 <div class="block">Scan all the store file names to find any lingering HFileLink files,
  which refer to some none-exiting files.</div>
 </td>
 </tr>
-<tr id="i72" class="altColor">
+<tr id="i71" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#offlineReferenceFileRepair--">offlineReferenceFileRepair</a></span>()</code>
 <div class="block">Scan all the store file names to find any lingering reference files,
  which refer to some none-exiting files.</div>
 </td>
 </tr>
-<tr id="i73" class="rowColor">
+<tr id="i72" class="altColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#onlineConsistencyRepair--">onlineConsistencyRepair</a></span>()</code>
 <div class="block">This repair method requires the cluster to be online since it contacts
  region servers and the masters.</div>
 </td>
 </tr>
-<tr id="i74" class="altColor">
+<tr id="i73" class="rowColor">
 <td class="colFirst"><code>int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#onlineHbck--">onlineHbck</a></span>()</code>
 <div class="block">Contacts the master and prints out cluster-wide information</div>
 </td>
 </tr>
-<tr id="i75" class="rowColor">
+<tr id="i74" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#preCheckPermission--">preCheckPermission</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i76" class="altColor">
+<tr id="i75" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#printTableSummary-java.util.SortedMap-">printTableSummary</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)</code>
 <div class="block">Prints summary of all tables found on the system.</div>
 </td>
 </tr>
-<tr id="i77" class="rowColor">
+<tr id="i76" class="altColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#printUsageAndExit--">printUsageAndExit</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i78" class="altColor">
+<tr id="i77" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#processRegionServers-java.util.Collection-">processRegionServers</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;regionServerList)</code>
 <div class="block">Contacts each regionserver and fetches metadata about regions.</div>
 </td>
 </tr>
-<tr id="i79" class="rowColor">
+<tr id="i78" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#rebuildMeta-boolean-">rebuildMeta</a></span>(boolean&nbsp;fix)</code>
 <div class="block">Rebuilds meta from information in hdfs/fs.</div>
 </td>
 </tr>
-<tr id="i80" class="altColor">
+<tr id="i79" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#recordMetaRegion--">recordMetaRegion</a></span>()</code>
 <div class="block">Record the location of the hbase:meta region as found in ZooKeeper.</div>
 </td>
 </tr>
-<tr id="i81" class="rowColor">
+<tr id="i80" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#removeHBCKMetaRecoveryWALDir-java.lang.String-">removeHBCKMetaRecoveryWALDir</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;walFactoryId)</code>
 <div class="block">Removes the empty Meta recovery WAL directory.</div>
 </td>
 </tr>
-<tr id="i82" class="altColor">
+<tr id="i81" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#reportEmptyMetaCells--">reportEmptyMetaCells</a></span>()</code>
 <div class="block">TODO -- need to add tests for this.</div>
 </td>
 </tr>
-<tr id="i83" class="rowColor">
+<tr id="i82" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#reportTablesInFlux--">reportTablesInFlux</a></span>()</code>
 <div class="block">TODO -- need to add tests for this.</div>
 </td>
 </tr>
-<tr id="i84" class="altColor">
+<tr id="i83" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#resetSplitParent-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">resetSplitParent</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)</code>
 <div class="block">Reset the split parent region info in meta table</div>
 </td>
 </tr>
-<tr id="i85" class="rowColor">
+<tr id="i84" class="altColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#restoreHdfsIntegrity--">restoreHdfsIntegrity</a></span>()</code>
 <div class="block">This method determines if there are table integrity errors in HDFS.</div>
 </td>
 </tr>
-<tr id="i86" class="altColor">
+<tr id="i85" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setCheckHdfs-boolean-">setCheckHdfs</a></span>(boolean&nbsp;checking)</code>&nbsp;</td>
 </tr>
-<tr id="i87" class="rowColor">
+<tr id="i86" class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setCheckMetaOnly--">setCheckMetaOnly</a></span>()</code>
 <div class="block">Set hbase:meta check mode.</div>
 </td>
 </tr>
-<tr id="i88" class="altColor">
+<tr id="i87" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setCleanReplicationBarrier-boolean-">setCleanReplicationBarrier</a></span>(boolean&nbsp;shouldClean)</code>&nbsp;</td>
 </tr>
-<tr id="i89" class="rowColor">
+<tr id="i88" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setCleanReplicationBarrierTable-java.lang.String-">setCleanReplicationBarrierTable</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;cleanReplicationBarrierTable)</code>&nbsp;</td>
 </tr>
-<tr id="i90" class="altColor">
+<tr id="i89" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setDisplayFullReport--">setDisplayFullReport</a></span>()</code>
 <div class="block">Display the full report from fsck.</div>
 </td>
 </tr>
-<tr id="i91" class="rowColor">
+<tr id="i90" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixAssignments-boolean-">setFixAssignments</a></span>(boolean&nbsp;shouldFix)</code>
 <div class="block">Fix inconsistencies found by fsck.</div>
 </td>
 </tr>
-<tr id="i92" class="altColor">
+<tr id="i91" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixEmptyMetaCells-boolean-">setFixEmptyMetaCells</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i93" class="rowColor">
+<tr id="i92" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixHdfsHoles-boolean-">setFixHdfsHoles</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i94" class="altColor">
+<tr id="i93" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixHdfsOrphans-boolean-">setFixHdfsOrphans</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i95" class="rowColor">
+<tr id="i94" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixHdfsOverlaps-boolean-">setFixHdfsOverlaps</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i96" class="altColor">
+<tr id="i95" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixHFileLinks-boolean-">setFixHFileLinks</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i97" class="rowColor">
+<tr id="i96" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixMeta-boolean-">setFixMeta</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i98" class="altColor">
+<tr id="i97" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixReferenceFiles-boolean-">setFixReferenceFiles</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i99" class="rowColor">
+<tr id="i98" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixReplication-boolean-">setFixReplication</a></span>(boolean&nbsp;shouldFix)</code>
 <div class="block">Set replication fix mode.</div>
 </td>
 </tr>
-<tr id="i100" class="altColor">
+<tr id="i99" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixSplitParents-boolean-">setFixSplitParents</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i101" class="rowColor">
+<tr id="i100" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixTableOrphans-boolean-">setFixTableOrphans</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i102" class="altColor">
+<tr id="i101" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setFixVersionFile-boolean-">setFixVersionFile</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i103" class="rowColor">
+<tr id="i102" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setForceExclusive--">setForceExclusive</a></span>()</code>
 <div class="block">Set exclusive mode.</div>
 </td>
 </tr>
-<tr id="i104" class="altColor">
+<tr id="i103" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setHFileCorruptionChecker-org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker-">setHFileCorruptionChecker</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html" title="class in org.apache.hadoop.hbase.util.hbck">HFileCorruptionChecker</a>&nbsp;hfcc)</code>&nbsp;</td>
 </tr>
-<tr id="i105" class="rowColor">
+<tr id="i104" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setIgnorePreCheckPermission-boolean-">setIgnorePreCheckPermission</a></span>(boolean&nbsp;ignorePreCheckPermission)</code>&nbsp;</td>
 </tr>
-<tr id="i106" class="altColor">
+<tr id="i105" class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setMasterInMaintenanceMode--">setMasterInMaintenanceMode</a></span>()</code>
 <div class="block">This method maintains an ephemeral znode.</div>
 </td>
 </tr>
-<tr id="i107" class="rowColor">
+<tr id="i106" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setMaxMerge-int-">setMaxMerge</a></span>(int&nbsp;mm)</code>&nbsp;</td>
 </tr>
-<tr id="i108" class="altColor">
+<tr id="i107" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setMaxOverlapsToSideline-int-">setMaxOverlapsToSideline</a></span>(int&nbsp;mo)</code>&nbsp;</td>
 </tr>
-<tr id="i109" class="rowColor">
+<tr id="i108" class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setRegionBoundariesCheck--">setRegionBoundariesCheck</a></span>()</code>
 <div class="block">Set region boundaries check mode.</div>
 </td>
 </tr>
-<tr id="i110" class="altColor">
+<tr id="i109" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setRemoveParents-boolean-">setRemoveParents</a></span>(boolean&nbsp;shouldFix)</code>&nbsp;</td>
 </tr>
-<tr id="i111" class="rowColor">
+<tr id="i110" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setRetCode-int-">setRetCode</a></span>(int&nbsp;code)</code>&nbsp;</td>
 </tr>
-<tr id="i112" class="altColor">
+<tr id="i111" class="rowColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setShouldRerun--">setShouldRerun</a></span>()</code>
 <div class="block">Check if we should rerun fsck again.</div>
 </td>
 </tr>
-<tr id="i113" class="rowColor">
+<tr id="i112" class="altColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setSidelineBigOverlaps-boolean-">setSidelineBigOverlaps</a></span>(boolean&nbsp;sbo)</code>&nbsp;</td>
 </tr>
-<tr id="i114" class="altColor">
+<tr id="i113" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setSidelineDir-java.lang.String-">setSidelineDir</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;sidelineDir)</code>&nbsp;</td>
 </tr>
-<tr id="i115" class="rowColor">
+<tr id="i114" class="altColor">
 <td class="colFirst"><code>(package private) static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setSummary--">setSummary</a></span>()</code>
 <div class="block">Set summary mode.</div>
 </td>
 </tr>
-<tr id="i116" class="altColor">
+<tr id="i115" class="rowColor">
 <td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#setTimeLag-long-">setTimeLag</a></span>(long&nbsp;seconds)</code>
 <div class="block">We are interested in only those tables that have not changed their state in
  hbase:meta during the last few seconds specified by hbase.admin.fsck.timelag</div>
 </td>
 </tr>
-<tr id="i117" class="rowColor">
+<tr id="i116" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldCheckHdfs--">shouldCheckHdfs</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i118" class="altColor">
+<tr id="i117" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixAssignments--">shouldFixAssignments</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i119" class="rowColor">
+<tr id="i118" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixEmptyMetaCells--">shouldFixEmptyMetaCells</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i120" class="altColor">
+<tr id="i119" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixHdfsHoles--">shouldFixHdfsHoles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i121" class="rowColor">
+<tr id="i120" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixHdfsOrphans--">shouldFixHdfsOrphans</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i122" class="altColor">
+<tr id="i121" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixHdfsOverlaps--">shouldFixHdfsOverlaps</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i123" class="rowColor">
+<tr id="i122" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixHFileLinks--">shouldFixHFileLinks</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i124" class="altColor">
+<tr id="i123" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixMeta--">shouldFixMeta</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i125" class="rowColor">
+<tr id="i124" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixReferenceFiles--">shouldFixReferenceFiles</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i126" class="altColor">
+<tr id="i125" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixSplitParents--">shouldFixSplitParents</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i127" class="rowColor">
+<tr id="i126" class="altColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixTableOrphans--">shouldFixTableOrphans</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i128" class="altColor">
+<tr id="i127" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldFixVersionFile--">shouldFixVersionFile</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i129" class="rowColor">
+<tr id="i128" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldIgnorePreCheckPermission--">shouldIgnorePreCheckPermission</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i130" class="altColor">
+<tr id="i129" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldRemoveParents--">shouldRemoveParents</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i131" class="rowColor">
+<tr id="i130" class="altColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldRerun--">shouldRerun</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i132" class="altColor">
+<tr id="i131" class="rowColor">
 <td class="colFirst"><code>boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#shouldSidelineBigOverlaps--">shouldSidelineBigOverlaps</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i133" class="rowColor">
+<tr id="i132" class="altColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#sidelineFile-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">sidelineFile</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
             org.apache.hadoop.fs.Path&nbsp;hbaseRoot,
             org.apache.hadoop.fs.Path&nbsp;path)</code>&nbsp;</td>
 </tr>
-<tr id="i134" class="altColor">
+<tr id="i133" class="rowColor">
 <td class="colFirst"><code>(package private) org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#sidelineOldMeta--">sidelineOldMeta</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i135" class="rowColor">
+<tr id="i134" class="altColor">
 <td class="colFirst"><code>(package private) org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#sidelineRegionDir-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">sidelineRegionDir</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)</code>
 <div class="block">Sideline a region dir (instead of deleting it)</div>
 </td>
 </tr>
-<tr id="i136" class="altColor">
+<tr id="i135" class="rowColor">
 <td class="colFirst"><code>(package private) org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#sidelineRegionDir-org.apache.hadoop.fs.FileSystem-java.lang.String-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">sidelineRegionDir</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;parentDir,
@@ -1351,7 +1347,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <div class="block">Sideline a region dir (instead of deleting it)</div>
 </td>
 </tr>
-<tr id="i137" class="rowColor">
+<tr id="i136" class="altColor">
 <td class="colFirst"><code>(package private) void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#sidelineTable-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.TableName-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-">sidelineTable</a></span>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
              <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
@@ -1360,30 +1356,30 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <div class="block">Side line an entire table.</div>
 </td>
 </tr>
-<tr id="i138" class="altColor">
+<tr id="i137" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#suggestFixes-java.util.SortedMap-">suggestFixes</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)</code>
 <div class="block">Suggest fixes for each table</div>
 </td>
 </tr>
-<tr id="i139" class="rowColor">
+<tr id="i138" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#tryAssignmentRepair-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-java.lang.String-">tryAssignmentRepair</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)</code>&nbsp;</td>
 </tr>
-<tr id="i140" class="altColor">
+<tr id="i139" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#unassignMetaReplica-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">unassignMetaReplica</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)</code>&nbsp;</td>
 </tr>
-<tr id="i141" class="rowColor">
+<tr id="i140" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#undeployRegions-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">undeployRegions</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)</code>&nbsp;</td>
 </tr>
-<tr id="i142" class="altColor">
+<tr id="i141" class="rowColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#undeployRegionsForHbi-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">undeployRegionsForHbi</a></span>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)</code>&nbsp;</td>
 </tr>
-<tr id="i143" class="rowColor">
+<tr id="i142" class="altColor">
 <td class="colFirst"><code>private void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#unlockHbck--">unlockHbck</a></span>()</code>&nbsp;</td>
 </tr>
@@ -1422,7 +1418,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_TIME_LAG</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.217">DEFAULT_TIME_LAG</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.216">DEFAULT_TIME_LAG</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_TIME_LAG">Constant Field Values</a></dd>
@@ -1435,7 +1431,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_SLEEP_BEFORE_RERUN</h4>
-<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.218">DEFAULT_SLEEP_BEFORE_RERUN</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.217">DEFAULT_SLEEP_BEFORE_RERUN</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_SLEEP_BEFORE_RERUN">Constant Field Values</a></dd>
@@ -1448,7 +1444,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>MAX_NUM_THREADS</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.219">MAX_NUM_THREADS</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.218">MAX_NUM_THREADS</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.MAX_NUM_THREADS">Constant Field Values</a></dd>
@@ -1461,7 +1457,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>rsSupportsOffline</h4>
-<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.220">rsSupportsOffline</a></pre>
+<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.219">rsSupportsOffline</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_OVERLAPS_TO_SIDELINE">
@@ -1470,7 +1466,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_OVERLAPS_TO_SIDELINE</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.221">DEFAULT_OVERLAPS_TO_SIDELINE</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.220">DEFAULT_OVERLAPS_TO_SIDELINE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_OVERLAPS_TO_SIDELINE">Constant Field Values</a></dd>
@@ -1483,7 +1479,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MAX_MERGE</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.222">DEFAULT_MAX_MERGE</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.221">DEFAULT_MAX_MERGE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_MAX_MERGE">Constant Field Values</a></dd>
@@ -1496,7 +1492,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>TO_BE_LOADED</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.223">TO_BE_LOADED</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.222">TO_BE_LOADED</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.TO_BE_LOADED">Constant Field Values</a></dd>
@@ -1509,7 +1505,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>HBCK_LOCK_FILE</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.229">HBCK_LOCK_FILE</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.228">HBCK_LOCK_FILE</a></pre>
 <div class="block">Here is where hbase-1.x used to default the lock for hbck1.
  It puts in place a lock when it goes to write/make changes.</div>
 <dl>
@@ -1524,7 +1520,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MAX_LOCK_FILE_ATTEMPTS</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.230">DEFAULT_MAX_LOCK_FILE_ATTEMPTS</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.229">DEFAULT_MAX_LOCK_FILE_ATTEMPTS</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_MAX_LOCK_FILE_ATTEMPTS">Constant Field Values</a></dd>
@@ -1537,7 +1533,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.231">DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.230">DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL">Constant Field Values</a></dd>
@@ -1550,7 +1546,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.232">DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.231">DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME">Constant Field Values</a></dd>
@@ -1563,7 +1559,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_WAIT_FOR_LOCK_TIMEOUT</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.237">DEFAULT_WAIT_FOR_LOCK_TIMEOUT</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.236">DEFAULT_WAIT_FOR_LOCK_TIMEOUT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_WAIT_FOR_LOCK_TIMEOUT">Constant Field Values</a></dd>
@@ -1576,7 +1572,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.238">DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.237">DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS">Constant Field Values</a></dd>
@@ -1589,7 +1585,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.239">DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.238">DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL">Constant Field Values</a></dd>
@@ -1602,7 +1598,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.240">DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.239">DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME">Constant Field Values</a></dd>
@@ -1615,7 +1611,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.245">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.244">LOG</a></pre>
 <div class="block">Internal resources</div>
 </li>
 </ul>
@@ -1625,7 +1621,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>status</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ClusterMetrics.html" title="interface in org.apache.hadoop.hbase">ClusterMetrics</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.246">status</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ClusterMetrics.html" title="interface in org.apache.hadoop.hbase">ClusterMetrics</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.245">status</a></pre>
 </li>
 </ul>
 <a name="connection">
@@ -1634,7 +1630,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>connection</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.247">connection</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.246">connection</a></pre>
 </li>
 </ul>
 <a name="admin">
@@ -1643,7 +1639,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>admin</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.248">admin</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Admin.html" title="interface in org.apache.hadoop.hbase.client">Admin</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.247">admin</a></pre>
 </li>
 </ul>
 <a name="meta">
@@ -1652,7 +1648,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>meta</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.249">meta</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.248">meta</a></pre>
 </li>
 </ul>
 <a name="executor">
@@ -1661,7 +1657,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>executor</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.251">executor</a></pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.250">executor</a></pre>
 </li>
 </ul>
 <a name="startMillis">
@@ -1670,7 +1666,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>startMillis</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.252">startMillis</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.251">startMillis</a></pre>
 </li>
 </ul>
 <a name="hfcc">
@@ -1679,7 +1675,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>hfcc</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html" title="class in org.apache.hadoop.hbase.util.hbck">HFileCorruptionChecker</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.253">hfcc</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html" title="class in org.apache.hadoop.hbase.util.hbck">HFileCorruptionChecker</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.252">hfcc</a></pre>
 </li>
 </ul>
 <a name="retcode">
@@ -1688,7 +1684,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>retcode</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.254">retcode</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.253">retcode</a></pre>
 </li>
 </ul>
 <a name="HBCK_LOCK_PATH">
@@ -1697,7 +1693,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>HBCK_LOCK_PATH</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.255">HBCK_LOCK_PATH</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.254">HBCK_LOCK_PATH</a></pre>
 </li>
 </ul>
 <a name="hbckOutFd">
@@ -1706,7 +1702,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>hbckOutFd</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FSDataOutputStream <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.256">hbckOutFd</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FSDataOutputStream <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.255">hbckOutFd</a></pre>
 </li>
 </ul>
 <a name="hbckLockCleanup">
@@ -1715,7 +1711,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>hbckLockCleanup</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicBoolean</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.260">hbckLockCleanup</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicBoolean.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicBoolean</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.259">hbckLockCleanup</a></pre>
 </li>
 </ul>
 <a name="unsupportedOptionsInV2">
@@ -1724,7 +1720,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>unsupportedOptionsInV2</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.263">unsupportedOptionsInV2</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.262">unsupportedOptionsInV2</a></pre>
 </li>
 </ul>
 <a name="details">
@@ -1733,7 +1729,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>details</h4>
-<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.271">details</a></pre>
+<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.270">details</a></pre>
 <div class="block">Options</div>
 </li>
 </ul>
@@ -1743,7 +1739,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>timelag</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.272">timelag</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.271">timelag</a></pre>
 </li>
 </ul>
 <a name="forceExclusive">
@@ -1752,7 +1748,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>forceExclusive</h4>
-<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.273">forceExclusive</a></pre>
+<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.272">forceExclusive</a></pre>
 </li>
 </ul>
 <a name="fixAssignments">
@@ -1761,7 +1757,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixAssignments</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.274">fixAssignments</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.273">fixAssignments</a></pre>
 </li>
 </ul>
 <a name="fixMeta">
@@ -1770,7 +1766,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixMeta</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.275">fixMeta</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.274">fixMeta</a></pre>
 </li>
 </ul>
 <a name="checkHdfs">
@@ -1779,7 +1775,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHdfs</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.276">checkHdfs</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.275">checkHdfs</a></pre>
 </li>
 </ul>
 <a name="fixHdfsHoles">
@@ -1788,7 +1784,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixHdfsHoles</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.277">fixHdfsHoles</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.276">fixHdfsHoles</a></pre>
 </li>
 </ul>
 <a name="fixHdfsOverlaps">
@@ -1797,7 +1793,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixHdfsOverlaps</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.278">fixHdfsOverlaps</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.277">fixHdfsOverlaps</a></pre>
 </li>
 </ul>
 <a name="fixHdfsOrphans">
@@ -1806,7 +1802,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixHdfsOrphans</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.279">fixHdfsOrphans</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.278">fixHdfsOrphans</a></pre>
 </li>
 </ul>
 <a name="fixTableOrphans">
@@ -1815,7 +1811,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixTableOrphans</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.280">fixTableOrphans</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.279">fixTableOrphans</a></pre>
 </li>
 </ul>
 <a name="fixVersionFile">
@@ -1824,7 +1820,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixVersionFile</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.281">fixVersionFile</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.280">fixVersionFile</a></pre>
 </li>
 </ul>
 <a name="fixSplitParents">
@@ -1833,7 +1829,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixSplitParents</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.282">fixSplitParents</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.281">fixSplitParents</a></pre>
 </li>
 </ul>
 <a name="removeParents">
@@ -1842,7 +1838,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>removeParents</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.283">removeParents</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.282">removeParents</a></pre>
 </li>
 </ul>
 <a name="fixReferenceFiles">
@@ -1851,7 +1847,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixReferenceFiles</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.284">fixReferenceFiles</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.283">fixReferenceFiles</a></pre>
 </li>
 </ul>
 <a name="fixHFileLinks">
@@ -1860,7 +1856,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixHFileLinks</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.285">fixHFileLinks</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.284">fixHFileLinks</a></pre>
 </li>
 </ul>
 <a name="fixEmptyMetaCells">
@@ -1869,7 +1865,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixEmptyMetaCells</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.286">fixEmptyMetaCells</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.285">fixEmptyMetaCells</a></pre>
 </li>
 </ul>
 <a name="fixReplication">
@@ -1878,7 +1874,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixReplication</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.287">fixReplication</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.286">fixReplication</a></pre>
 </li>
 </ul>
 <a name="cleanReplicationBarrier">
@@ -1887,7 +1883,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>cleanReplicationBarrier</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.288">cleanReplicationBarrier</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.287">cleanReplicationBarrier</a></pre>
 </li>
 </ul>
 <a name="fixAny">
@@ -1896,7 +1892,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixAny</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.289">fixAny</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.288">fixAny</a></pre>
 </li>
 </ul>
 <a name="tablesIncluded">
@@ -1905,7 +1901,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>tablesIncluded</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.293">tablesIncluded</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.292">tablesIncluded</a></pre>
 </li>
 </ul>
 <a name="cleanReplicationBarrierTable">
@@ -1914,7 +1910,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>cleanReplicationBarrierTable</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.294">cleanReplicationBarrierTable</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.293">cleanReplicationBarrierTable</a></pre>
 </li>
 </ul>
 <a name="maxMerge">
@@ -1923,7 +1919,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>maxMerge</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.295">maxMerge</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.294">maxMerge</a></pre>
 </li>
 </ul>
 <a name="maxOverlapsToSideline">
@@ -1932,7 +1928,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>maxOverlapsToSideline</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.297">maxOverlapsToSideline</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.296">maxOverlapsToSideline</a></pre>
 </li>
 </ul>
 <a name="sidelineBigOverlaps">
@@ -1941,7 +1937,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineBigOverlaps</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.298">sidelineBigOverlaps</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.297">sidelineBigOverlaps</a></pre>
 </li>
 </ul>
 <a name="sidelineDir">
@@ -1950,7 +1946,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.299">sidelineDir</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.298">sidelineDir</a></pre>
 </li>
 </ul>
 <a name="rerun">
@@ -1959,7 +1955,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>rerun</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.301">rerun</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.300">rerun</a></pre>
 </li>
 </ul>
 <a name="summary">
@@ -1968,7 +1964,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>summary</h4>
-<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.302">summary</a></pre>
+<pre>private static&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.301">summary</a></pre>
 </li>
 </ul>
 <a name="checkMetaOnly">
@@ -1977,7 +1973,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkMetaOnly</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.303">checkMetaOnly</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.302">checkMetaOnly</a></pre>
 </li>
 </ul>
 <a name="checkRegionBoundaries">
@@ -1986,7 +1982,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRegionBoundaries</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.304">checkRegionBoundaries</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.303">checkRegionBoundaries</a></pre>
 </li>
 </ul>
 <a name="ignorePreCheckPermission">
@@ -1995,7 +1991,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>ignorePreCheckPermission</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.305">ignorePreCheckPermission</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.304">ignorePreCheckPermission</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -2004,7 +2000,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.310">errors</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.309">errors</a></pre>
 <div class="block">State</div>
 </li>
 </ul>
@@ -2014,7 +2010,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixes</h4>
-<pre>int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.311">fixes</a></pre>
+<pre>int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.310">fixes</a></pre>
 </li>
 </ul>
 <a name="regionInfoMap">
@@ -2023,7 +2019,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>regionInfoMap</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeMap.html?is-external=true" title="class or interface in java.util">TreeMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.318">regionInfoMap</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeMap.html?is-external=true" title="class or interface in java.util">TreeMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.317">regionInfoMap</a></pre>
 <div class="block">This map contains the state of all hbck items.  It maps from encoded region
  name to HbckInfo structure.  The information contained in HbckInfo is used
  to detect and correct consistency (hdfs/meta/deployment) problems.</div>
@@ -2035,7 +2031,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>emptyRegionInfoQualifiers</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.320">emptyRegionInfoQualifiers</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.319">emptyRegionInfoQualifiers</a></pre>
 </li>
 </ul>
 <a name="tablesInfo">
@@ -2044,7 +2040,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>tablesInfo</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.332">tablesInfo</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.331">

<TRUNCATED>

[27/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index ea05301..26a93dd 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -269,3590 +269,3574 @@
 <span class="sourceLineNo">261</span>   */<a name="line.261"></a>
 <span class="sourceLineNo">262</span>  protected ClusterConnection clusterConnection;<a name="line.262"></a>
 <span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>  /*<a name="line.264"></a>
-<span class="sourceLineNo">265</span>   * Long-living meta table locator, which is created when the server is started and stopped<a name="line.265"></a>
-<span class="sourceLineNo">266</span>   * when server shuts down. References to this locator shall be used to perform according<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * operations in EventHandlers. Primary reason for this decision is to make it mockable<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   * for tests.<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   */<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  protected MetaTableLocator metaTableLocator;<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  /**<a name="line.272"></a>
-<span class="sourceLineNo">273</span>   * Go here to get table descriptors.<a name="line.273"></a>
-<span class="sourceLineNo">274</span>   */<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  protected TableDescriptors tableDescriptors;<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>  // Replication services. If no replication, this handler will be null.<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.279"></a>
-<span class="sourceLineNo">280</span><a name="line.280"></a>
-<span class="sourceLineNo">281</span>  // Compactions<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  public CompactSplit compactSplitThread;<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Map of regions currently being served by this region server. Key is the<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   * encoded region name.  All access should be synchronized.<a name="line.286"></a>
-<span class="sourceLineNo">287</span>   */<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  /**<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * Map of encoded region names to the DataNode locations they should be hosted on<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   * We store the value as InetSocketAddress since this is used only in HDFS<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * and here we really mean DataNode locations.<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   */<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  // Leases<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  protected Leases leases;<a name="line.303"></a>
+<span class="sourceLineNo">264</span>  /**<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * Go here to get table descriptors.<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  protected TableDescriptors tableDescriptors;<a name="line.267"></a>
+<span class="sourceLineNo">268</span><a name="line.268"></a>
+<span class="sourceLineNo">269</span>  // Replication services. If no replication, this handler will be null.<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  protected ReplicationSourceService replicationSourceHandler;<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  protected ReplicationSinkService replicationSinkHandler;<a name="line.271"></a>
+<span class="sourceLineNo">272</span><a name="line.272"></a>
+<span class="sourceLineNo">273</span>  // Compactions<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  public CompactSplit compactSplitThread;<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  /**<a name="line.276"></a>
+<span class="sourceLineNo">277</span>   * Map of regions currently being served by this region server. Key is the<a name="line.277"></a>
+<span class="sourceLineNo">278</span>   * encoded region name.  All access should be synchronized.<a name="line.278"></a>
+<span class="sourceLineNo">279</span>   */<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  protected final Map&lt;String, HRegion&gt; onlineRegions = new ConcurrentHashMap&lt;&gt;();<a name="line.280"></a>
+<span class="sourceLineNo">281</span><a name="line.281"></a>
+<span class="sourceLineNo">282</span>  /**<a name="line.282"></a>
+<span class="sourceLineNo">283</span>   * Map of encoded region names to the DataNode locations they should be hosted on.<a name="line.283"></a>
+<span class="sourceLineNo">284</span>   * We store the value as InetSocketAddress since this is used only in the HDFS<a name="line.284"></a>
+<span class="sourceLineNo">285</span>   * API (create() that takes favored nodes as hints for placing file blocks).<a name="line.285"></a>
+<span class="sourceLineNo">286</span>   * We could have used ServerName here as the value class, but we'd need to<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * convert it to InetSocketAddress at some point before the HDFS API call, and<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   * it seems a bit weird to store ServerName since ServerName refers to RegionServers<a name="line.288"></a>
+<span class="sourceLineNo">289</span>   * and here we really mean DataNode locations.<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   */<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  protected final Map&lt;String, InetSocketAddress[]&gt; regionFavoredNodesMap =<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      new ConcurrentHashMap&lt;&gt;();<a name="line.292"></a>
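As a rough, hedged illustration of how a map like regionFavoredNodesMap above gets populated (the encoded region name "exampleEncodedRegionName" and the datanode endpoints below are hypothetical, invented for this sketch; this is not the class's own code):

    import java.net.InetSocketAddress;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class FavoredNodesSketch {
      public static void main(String[] args) {
        // Keyed by encoded region name; values are DataNode endpoints used as HDFS placement hints.
        Map<String, InetSocketAddress[]> regionFavoredNodes = new ConcurrentHashMap<>();
        regionFavoredNodes.put("exampleEncodedRegionName", new InetSocketAddress[] {
            new InetSocketAddress("dn1.example.com", 50010),
            new InetSocketAddress("dn2.example.com", 50010),
            new InetSocketAddress("dn3.example.com", 50010) });
        // These addresses would later be handed to the HDFS create() call as favored-node hints.
        System.out.println(regionFavoredNodes.get("exampleEncodedRegionName").length + " favored nodes");
      }
    }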
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  // Leases<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  protected Leases leases;<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>  // Instance of the hbase executor service.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  protected ExecutorService executorService;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  // If false, the file system has become unavailable<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  protected volatile boolean fsOk;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  protected HFileSystem fs;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  protected HFileSystem walFs;<a name="line.303"></a>
 <span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>  // Instance of the hbase executor executorService.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  protected ExecutorService executorService;<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  // If false, the file system has become unavailable<a name="line.308"></a>
-<span class="sourceLineNo">309</span>  protected volatile boolean fsOk;<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  protected HFileSystem fs;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  protected HFileSystem walFs;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  // Set when a report to the master comes back with a message asking us to<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  // shutdown. Also set by call to stop when debugging or running unit tests<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  // of HRegionServer in isolation.<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private volatile boolean stopped = false;<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // debugging and unit tests.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private volatile boolean abortRequested;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.321"></a>
-<span class="sourceLineNo">322</span>  // Default abort timeout is 1200 seconds for safe<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  // Will run this task when abort timeout<a name="line.324"></a>
-<span class="sourceLineNo">325</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.325"></a>
+<span class="sourceLineNo">305</span>  // Set when a report to the master comes back with a message asking us to<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  // shut down. Also set by a call to stop when debugging or running unit tests<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  // of HRegionServer in isolation.<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private volatile boolean stopped = false;<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  // Go down hard. Used if file system becomes unavailable and also in<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  // debugging and unit tests.<a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private volatile boolean abortRequested;<a name="line.312"></a>
+<span class="sourceLineNo">313</span>  public static final String ABORT_TIMEOUT = "hbase.regionserver.abort.timeout";<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  // Default abort timeout is 1200 seconds, to be safe<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  private static final long DEFAULT_ABORT_TIMEOUT = 1200000;<a name="line.315"></a>
+<span class="sourceLineNo">316</span>  // Will run this task when the abort timeout is reached<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  public static final String ABORT_TIMEOUT_TASK = "hbase.regionserver.abort.timeout.task";<a name="line.317"></a>
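A small, hedged sketch of how the abort-timeout settings above could be read from a Hadoop Configuration (the surrounding class is hypothetical; only the key names and the 1200000 ms default come from the constants above):

    import org.apache.hadoop.conf.Configuration;

    public class AbortTimeoutSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Falls back to the 1200 second default when hbase.regionserver.abort.timeout is unset.
        long abortTimeoutMs = conf.getLong("hbase.regionserver.abort.timeout", 1200000L);
        // Optional class name of a task to run when the abort timeout is reached.
        String abortTask = conf.get("hbase.regionserver.abort.timeout.task");
        System.out.println("abort timeout ms=" + abortTimeoutMs + ", task=" + abortTask);
      }
    }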
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  // space regions.<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  private boolean stopping = false;<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>  volatile boolean killed = false;<a name="line.325"></a>
 <span class="sourceLineNo">326</span><a name="line.326"></a>
-<span class="sourceLineNo">327</span>  ConcurrentMap&lt;String, Integer&gt; rowlocks = new ConcurrentHashMap&lt;&gt;();<a name="line.327"></a>
+<span class="sourceLineNo">327</span>  private volatile boolean shutDown = false;<a name="line.327"></a>
 <span class="sourceLineNo">328</span><a name="line.328"></a>
-<span class="sourceLineNo">329</span>  // A state before we go into stopped state.  At this stage we're closing user<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  // space regions.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private boolean stopping = false;<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  volatile boolean killed = false;<a name="line.333"></a>
-<span class="sourceLineNo">334</span><a name="line.334"></a>
-<span class="sourceLineNo">335</span>  private volatile boolean shutDown = false;<a name="line.335"></a>
-<span class="sourceLineNo">336</span><a name="line.336"></a>
-<span class="sourceLineNo">337</span>  protected final Configuration conf;<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Path rootDir;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Path walRootDir;<a name="line.340"></a>
+<span class="sourceLineNo">329</span>  protected final Configuration conf;<a name="line.329"></a>
+<span class="sourceLineNo">330</span><a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private Path rootDir;<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private Path walRootDir;<a name="line.332"></a>
+<span class="sourceLineNo">333</span><a name="line.333"></a>
+<span class="sourceLineNo">334</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  final int numRetries;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  protected final int threadWakeFrequency;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  protected final int msgInterval;<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  protected final int numRegionsToReport;<a name="line.340"></a>
 <span class="sourceLineNo">341</span><a name="line.341"></a>
-<span class="sourceLineNo">342</span>  protected final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  final int numRetries;<a name="line.344"></a>
-<span class="sourceLineNo">345</span>  protected final int threadWakeFrequency;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  protected final int msgInterval;<a name="line.346"></a>
+<span class="sourceLineNo">342</span>  // Stub to do region server status calls against the master.<a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.343"></a>
+<span class="sourceLineNo">344</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  RpcClient rpcClient;<a name="line.346"></a>
 <span class="sourceLineNo">347</span><a name="line.347"></a>
-<span class="sourceLineNo">348</span>  protected final int numRegionsToReport;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  // Stub to do region server status calls against the master.<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  private volatile RegionServerStatusService.BlockingInterface rssStub;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  private volatile LockService.BlockingInterface lockStub;<a name="line.352"></a>
-<span class="sourceLineNo">353</span>  // RPC client. Used to make the stub above that does region server status checking.<a name="line.353"></a>
-<span class="sourceLineNo">354</span>  RpcClient rpcClient;<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.357"></a>
+<span class="sourceLineNo">348</span>  private RpcRetryingCallerFactory rpcRetryingCallerFactory;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>  private RpcControllerFactory rpcControllerFactory;<a name="line.349"></a>
+<span class="sourceLineNo">350</span><a name="line.350"></a>
+<span class="sourceLineNo">351</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.351"></a>
+<span class="sourceLineNo">352</span><a name="line.352"></a>
+<span class="sourceLineNo">353</span>  // Info server. Default access so it can be used by unit tests. REGIONSERVER<a name="line.353"></a>
+<span class="sourceLineNo">354</span>  // is the name of the webapp and the attribute name used for stuffing this instance<a name="line.354"></a>
+<span class="sourceLineNo">355</span>  // into the web context.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  protected InfoServer infoServer;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>  private JvmPauseMonitor pauseMonitor;<a name="line.357"></a>
 <span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  private UncaughtExceptionHandler uncaughtExceptionHandler;<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  // Info server. Default access so can be used by unit tests. REGIONSERVER<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  // is name of the webapp and the attribute name used stuffing this instance<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  // into web context.<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  protected InfoServer infoServer;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  private JvmPauseMonitor pauseMonitor;<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  /** region server process name */<a name="line.367"></a>
-<span class="sourceLineNo">368</span>  public static final String REGIONSERVER = "regionserver";<a name="line.368"></a>
-<span class="sourceLineNo">369</span><a name="line.369"></a>
-<span class="sourceLineNo">370</span>  MetricsRegionServer metricsRegionServer;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  MetricsTable metricsTable;<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  private SpanReceiverHost spanReceiverHost;<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   */<a name="line.376"></a>
-<span class="sourceLineNo">377</span>  private ChoreService choreService;<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
-<span class="sourceLineNo">379</span>  /*<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * Check for compactions requests.<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   */<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  ScheduledChore compactionChecker;<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /*<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * Check for flushes<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
-<span class="sourceLineNo">387</span>  ScheduledChore periodicFlusher;<a name="line.387"></a>
-<span class="sourceLineNo">388</span><a name="line.388"></a>
-<span class="sourceLineNo">389</span>  protected volatile WALFactory walFactory;<a name="line.389"></a>
-<span class="sourceLineNo">390</span><a name="line.390"></a>
-<span class="sourceLineNo">391</span>  // WAL roller. log is protected rather than private to avoid<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  // eclipse warning when accessed by inner classes<a name="line.392"></a>
-<span class="sourceLineNo">393</span>  protected LogRoller walRoller;<a name="line.393"></a>
-<span class="sourceLineNo">394</span><a name="line.394"></a>
-<span class="sourceLineNo">395</span>  // A thread which calls reportProcedureDone<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  // flag set after we're done setting up server threads<a name="line.398"></a>
-<span class="sourceLineNo">399</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.399"></a>
-<span class="sourceLineNo">400</span><a name="line.400"></a>
-<span class="sourceLineNo">401</span>  // zookeeper connection and watcher<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  protected final ZKWatcher zooKeeper;<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>  // master address tracker<a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>  // Cluster Status Tracker<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  // Log Splitting Worker<a name="line.410"></a>
-<span class="sourceLineNo">411</span>  private SplitLogWorker splitLogWorker;<a name="line.411"></a>
+<span class="sourceLineNo">359</span>  /** region server process name */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public static final String REGIONSERVER = "regionserver";<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  MetricsRegionServer metricsRegionServer;<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  MetricsTable metricsTable;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>  private SpanReceiverHost spanReceiverHost;<a name="line.364"></a>
+<span class="sourceLineNo">365</span><a name="line.365"></a>
+<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   * ChoreService used to schedule tasks that we want to run periodically<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   */<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  private ChoreService choreService;<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  /*<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * Check for compaction requests.<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   */<a name="line.373"></a>
+<span class="sourceLineNo">374</span>  ScheduledChore compactionChecker;<a name="line.374"></a>
+<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">376</span>  /*<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * Check for flushes<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  ScheduledChore periodicFlusher;<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  protected volatile WALFactory walFactory;<a name="line.381"></a>
+<span class="sourceLineNo">382</span><a name="line.382"></a>
+<span class="sourceLineNo">383</span>  // WAL roller. It is protected rather than private to avoid an<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  // eclipse warning when accessed by inner classes.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected LogRoller walRoller;<a name="line.385"></a>
+<span class="sourceLineNo">386</span><a name="line.386"></a>
+<span class="sourceLineNo">387</span>  // A thread which calls reportProcedureDone<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  private RemoteProcedureResultReporter procedureResultReporter;<a name="line.388"></a>
+<span class="sourceLineNo">389</span><a name="line.389"></a>
+<span class="sourceLineNo">390</span>  // flag set after we're done setting up server threads<a name="line.390"></a>
+<span class="sourceLineNo">391</span>  final AtomicBoolean online = new AtomicBoolean(false);<a name="line.391"></a>
+<span class="sourceLineNo">392</span><a name="line.392"></a>
+<span class="sourceLineNo">393</span>  // zookeeper connection and watcher<a name="line.393"></a>
+<span class="sourceLineNo">394</span>  protected final ZKWatcher zooKeeper;<a name="line.394"></a>
+<span class="sourceLineNo">395</span><a name="line.395"></a>
+<span class="sourceLineNo">396</span>  // master address tracker<a name="line.396"></a>
+<span class="sourceLineNo">397</span>  private final MasterAddressTracker masterAddressTracker;<a name="line.397"></a>
+<span class="sourceLineNo">398</span><a name="line.398"></a>
+<span class="sourceLineNo">399</span>  // Cluster Status Tracker<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  protected final ClusterStatusTracker clusterStatusTracker;<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  // Log Splitting Worker<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  private SplitLogWorker splitLogWorker;<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  // A sleeper that sleeps for msgInterval.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  protected final Sleeper sleeper;<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private final int operationTimeout;<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  private final int shortOperationTimeout;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.411"></a>
 <span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>  // A sleeper that sleeps for msgInterval.<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  protected final Sleeper sleeper;<a name="line.414"></a>
-<span class="sourceLineNo">415</span><a name="line.415"></a>
-<span class="sourceLineNo">416</span>  private final int operationTimeout;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>  private final int shortOperationTimeout;<a name="line.417"></a>
-<span class="sourceLineNo">418</span><a name="line.418"></a>
-<span class="sourceLineNo">419</span>  private final RegionServerAccounting regionServerAccounting;<a name="line.419"></a>
+<span class="sourceLineNo">413</span>  // Cache configuration and block cache reference<a name="line.413"></a>
+<span class="sourceLineNo">414</span>  protected CacheConfig cacheConfig;<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  // Cache configuration for mob<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  final MobCacheConfig mobCacheConfig;<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** The health check chore. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  private HealthCheckChore healthCheckChore;<a name="line.419"></a>
 <span class="sourceLineNo">420</span><a name="line.420"></a>
-<span class="sourceLineNo">421</span>  // Cache configuration and block cache reference<a name="line.421"></a>
-<span class="sourceLineNo">422</span>  protected CacheConfig cacheConfig;<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  // Cache configuration for mob<a name="line.423"></a>
-<span class="sourceLineNo">424</span>  final MobCacheConfig mobCacheConfig;<a name="line.424"></a>
+<span class="sourceLineNo">421</span>  /** The nonce manager chore. */<a name="line.421"></a>
+<span class="sourceLineNo">422</span>  private ScheduledChore nonceManagerChore;<a name="line.422"></a>
+<span class="sourceLineNo">423</span><a name="line.423"></a>
+<span class="sourceLineNo">424</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.424"></a>
 <span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>  /** The health check chore. */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>  private HealthCheckChore healthCheckChore;<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>  /** The nonce manager chore. */<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  private ScheduledChore nonceManagerChore;<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  private Map&lt;String, com.google.protobuf.Service&gt; coprocessorServiceHandlers = Maps.newHashMap();<a name="line.432"></a>
-<span class="sourceLineNo">433</span><a name="line.433"></a>
-<span class="sourceLineNo">434</span>  /**<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * The server name the Master sees us as.  Its made from the hostname the<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * master passes us, port, and server startcode. Gets set after registration<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   * against  Master.<a name="line.437"></a>
-<span class="sourceLineNo">438</span>   */<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected ServerName serverName;<a name="line.439"></a>
-<span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>  /*<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   * hostname specified by hostname config<a name="line.442"></a>
-<span class="sourceLineNo">443</span>   */<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  protected String useThisHostnameInstead;<a name="line.444"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * The server name the Master sees us as.  It's made from the hostname the<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * master passes us, the port, and the server startcode. Gets set after registration<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * against the Master.<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   */<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  protected ServerName serverName;<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /*<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * hostname specified by hostname config<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  protected String useThisHostnameInstead;<a name="line.436"></a>
+<span class="sourceLineNo">437</span><a name="line.437"></a>
+<span class="sourceLineNo">438</span>  // key to the config parameter of server hostname<a name="line.438"></a>
+<span class="sourceLineNo">439</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.439"></a>
+<span class="sourceLineNo">440</span>  // both master and region server<a name="line.440"></a>
+<span class="sourceLineNo">441</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.441"></a>
+<span class="sourceLineNo">442</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.442"></a>
+<span class="sourceLineNo">443</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.444"></a>
 <span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  // key to the config parameter of server hostname<a name="line.446"></a>
-<span class="sourceLineNo">447</span>  // the specification of server hostname is optional. The hostname should be resolvable from<a name="line.447"></a>
-<span class="sourceLineNo">448</span>  // both master and region server<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.449"></a>
-<span class="sourceLineNo">450</span>  final static String RS_HOSTNAME_KEY = "hbase.regionserver.hostname";<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.451"></a>
-<span class="sourceLineNo">452</span>  protected final static String MASTER_HOSTNAME_KEY = "hbase.master.hostname";<a name="line.452"></a>
-<span class="sourceLineNo">453</span><a name="line.453"></a>
-<span class="sourceLineNo">454</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.454"></a>
-<span class="sourceLineNo">455</span>  // Exception will be thrown if both are used.<a name="line.455"></a>
-<span class="sourceLineNo">456</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.456"></a>
-<span class="sourceLineNo">457</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * This servers startcode.<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  protected final long startcode;<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>  /**<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   * Unique identifier for the cluster we are a part of.<a name="line.465"></a>
-<span class="sourceLineNo">466</span>   */<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  protected String clusterId;<a name="line.467"></a>
+<span class="sourceLineNo">446</span>  // HBASE-18226: This config and hbase.regionserver.hostname are mutually exclusive.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  // An exception will be thrown if both are used.<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  final static String RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY =<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    "hbase.regionserver.hostname.disable.master.reversedns";<a name="line.449"></a>
+<span class="sourceLineNo">450</span><a name="line.450"></a>
+<span class="sourceLineNo">451</span>  /**<a name="line.451"></a>
+<span class="sourceLineNo">452</span>   * This server's startcode.<a name="line.452"></a>
+<span class="sourceLineNo">453</span>   */<a name="line.453"></a>
+<span class="sourceLineNo">454</span>  protected final long startcode;<a name="line.454"></a>
+<span class="sourceLineNo">455</span><a name="line.455"></a>
+<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
+<span class="sourceLineNo">457</span>   * Unique identifier for the cluster we are a part of.<a name="line.457"></a>
+<span class="sourceLineNo">458</span>   */<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  protected String clusterId;<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  /**<a name="line.461"></a>
+<span class="sourceLineNo">462</span>   * Chore to periodically clean the moved region list<a name="line.462"></a>
+<span class="sourceLineNo">463</span>   */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>  // chore for refreshing store files for secondary regions<a name="line.466"></a>
+<span class="sourceLineNo">467</span>  private StorefileRefresherChore storefileRefresher;<a name="line.467"></a>
 <span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Chore to clean periodically the moved region list<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   */<a name="line.471"></a>
-<span class="sourceLineNo">472</span>  private MovedRegionsCleaner movedRegionsCleaner;<a name="line.472"></a>
-<span class="sourceLineNo">473</span><a name="line.473"></a>
-<span class="sourceLineNo">474</span>  // chore for refreshing store files for secondary regions<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  private StorefileRefresherChore storefileRefresher;<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  private RegionServerCoprocessorHost rsHost;<a name="line.477"></a>
-<span class="sourceLineNo">478</span><a name="line.478"></a>
-<span class="sourceLineNo">479</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.479"></a>
-<span class="sourceLineNo">480</span><a name="line.480"></a>
-<span class="sourceLineNo">481</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>  /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * in the case where client doesn't receive the response from a successful operation and<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   * retries. We track the successful ops for some time via a nonce sent by client and handle<a name="line.487"></a>
-<span class="sourceLineNo">488</span>   * duplicate operations (currently, by failing them; in future we might use MVCC to return<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * result). Nonces are also recovered from WAL during, recovery; however, the caveats (from<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * HBASE-3787) are:<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   * - There's no WAL recovery during normal region move, so nonces will not be transfered.<a name="line.494"></a>
-<span class="sourceLineNo">495</span>   * We can have separate additional "Nonce WAL". It will just contain bunch of numbers and<a name="line.495"></a>
-<span class="sourceLineNo">496</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * log (or both occasionally, which doesn't matter). Nonce log file can be deleted after the<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   * latest nonce in it expired. It can also be recovered during move.<a name="line.500"></a>
-<span class="sourceLineNo">501</span>   */<a name="line.501"></a>
-<span class="sourceLineNo">502</span>  final ServerNonceManager nonceManager;<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>  private UserProvider userProvider;<a name="line.504"></a>
-<span class="sourceLineNo">505</span><a name="line.505"></a>
-<span class="sourceLineNo">506</span>  protected final RSRpcServices rpcServices;<a name="line.506"></a>
+<span class="sourceLineNo">469</span>  private RegionServerCoprocessorHost rsHost;<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private RegionServerProcedureManagerHost rspmHost;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>  private RegionServerRpcQuotaManager rsQuotaManager;<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  private RegionServerSpaceQuotaManager rsSpaceQuotaManager;<a name="line.474"></a>
+<span class="sourceLineNo">475</span><a name="line.475"></a>
+<span class="sourceLineNo">476</span>  /**<a name="line.476"></a>
+<span class="sourceLineNo">477</span>   * Nonce manager. Nonces are used to make operations like increment and append idempotent<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * in the case where the client doesn't receive the response from a successful operation and<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   * retries. We track the successful ops for some time via a nonce sent by the client and handle<a name="line.479"></a>
+<span class="sourceLineNo">480</span>   * duplicate operations (currently, by failing them; in the future we might use MVCC to return<a name="line.480"></a>
+<span class="sourceLineNo">481</span>   * result). Nonces are also recovered from WAL during recovery; however, the caveats (from<a name="line.481"></a>
+<span class="sourceLineNo">482</span>   * HBASE-3787) are:<a name="line.482"></a>
+<span class="sourceLineNo">483</span>   * - WAL recovery is optimized, and under high load we won't read nearly nonce-timeout worth<a name="line.483"></a>
+<span class="sourceLineNo">484</span>   *   of past records. If we don't read the records, we don't read and recover the nonces.<a name="line.484"></a>
+<span class="sourceLineNo">485</span>   *   Some WALs within nonce-timeout at recovery may not even be present due to rolling/cleanup.<a name="line.485"></a>
+<span class="sourceLineNo">486</span>   * - There's no WAL recovery during a normal region move, so nonces will not be transferred.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * We can have a separate additional "Nonce WAL". It will just contain a bunch of numbers and<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   * won't be flushed on main path - because WAL itself also contains nonces, if we only flush<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * it before memstore flush, for a given nonce we will either see it in the WAL (if it was<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   * never flushed to disk, it will be part of recovery), or we'll see it as part of the nonce<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * log (or both occasionally, which doesn't matter). The nonce log file can be deleted after the<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * latest nonce in it has expired. It can also be recovered during a move.<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  final ServerNonceManager nonceManager;<a name="line.494"></a>
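To make the nonce idea above concrete, here is a toy, hedged sketch of nonce-based de-duplication; it is not the ServerNonceManager API, just an illustration of how remembering (group, nonce) pairs lets a retried mutation be recognized as a duplicate:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    public class NonceDedupSketch {
      // Remembers (nonceGroup, nonce) pairs of operations that already ran.
      private final Set<String> seen = ConcurrentHashMap.newKeySet();

      /** Returns true if the operation should run, false if it is a duplicate retry. */
      public boolean startOperation(long nonceGroup, long nonce) {
        return seen.add(nonceGroup + ":" + nonce);
      }

      public static void main(String[] args) {
        NonceDedupSketch dedup = new NonceDedupSketch();
        System.out.println(dedup.startOperation(1L, 42L)); // true: first attempt applies
        System.out.println(dedup.startOperation(1L, 42L)); // false: client retry is a duplicate
      }
    }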
+<span class="sourceLineNo">495</span><a name="line.495"></a>
+<span class="sourceLineNo">496</span>  private UserProvider userProvider;<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>  protected final RSRpcServices rpcServices;<a name="line.498"></a>
+<span class="sourceLineNo">499</span><a name="line.499"></a>
+<span class="sourceLineNo">500</span>  protected CoordinatedStateManager csm;<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>  /**<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.503"></a>
+<span class="sourceLineNo">504</span>   * when the regionserver is notified that there was a change in the on-disk configs.<a name="line.504"></a>
+<span class="sourceLineNo">505</span>   */<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  protected final ConfigurationManager configurationManager;<a name="line.506"></a>
 <span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  protected CoordinatedStateManager csm;<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Configuration manager is used to register/deregister and notify the configuration observers<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * when the regionserver is notified that there was a change in the on disk configs.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   */<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  protected final ConfigurationManager configurationManager;<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  @VisibleForTesting<a name="line.516"></a>
-<span class="sourceLineNo">517</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.517"></a>
+<span class="sourceLineNo">508</span>  @VisibleForTesting<a name="line.508"></a>
+<span class="sourceLineNo">509</span>  CompactedHFilesDischarger compactedFileDischarger;<a name="line.509"></a>
+<span class="sourceLineNo">510</span><a name="line.510"></a>
+<span class="sourceLineNo">511</span>  private volatile ThroughputController flushThroughputController;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.513"></a>
+<span class="sourceLineNo">514</span><a name="line.514"></a>
+<span class="sourceLineNo">515</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.515"></a>
+<span class="sourceLineNo">516</span><a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.517"></a>
 <span class="sourceLineNo">518</span><a name="line.518"></a>
-<span class="sourceLineNo">519</span>  private volatile ThroughputController flushThroughputController;<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  protected SecureBulkLoadManager secureBulkLoadManager;<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  protected FileSystemUtilizationChore fsUtilizationChore;<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private final NettyEventLoopGroupConfig eventLoopGroupConfig;<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * means it needs to just come up and make do without a Master to talk to: e.g. in test or<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   * HRegionServer is doing other than its usual duties: e.g. as an hollowed-out host whose only<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   */<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private final boolean masterless;<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>  /**<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   * Starts a HRegionServer at the default location<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
-<span class="sourceLineNo">539</span>  // Don't start any services or managers in here in the Constructor.<a name="line.539"></a>
-<span class="sourceLineNo">540</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    super("RegionServer");  // thread name<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TraceUtil.initTracer(conf);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    try {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      this.startcode = System.currentTimeMillis();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      this.conf = conf;<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.fsOk = true;<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      HFile.checkHFileVersion(this.conf);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      checkCodecs(this.conf);<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.554"></a>
+<span class="sourceLineNo">519</span>  /**<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   * True if this RegionServer is coming up in a cluster where there is no Master;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>   * it means it needs to just come up and make do without a Master to talk to: e.g. in tests, or<a name="line.521"></a>
+<span class="sourceLineNo">522</span>   * when HRegionServer is doing something other than its usual duties: e.g. as a hollowed-out host whose only<a name="line.522"></a>
+<span class="sourceLineNo">523</span>   * purpose is as a Replication-stream sink; see HBASE-18846 for more.<a name="line.523"></a>
+<span class="sourceLineNo">524</span>   */<a name="line.524"></a>
+<span class="sourceLineNo">525</span>  private final boolean masterless;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  static final String MASTERLESS_CONFIG_NAME = "hbase.masterless";<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>   * Starts an HRegionServer at the default location.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>  // Don't start any services or managers in here in the Constructor.<a name="line.531"></a>
+<span class="sourceLineNo">532</span>  // Defer till after we register with the Master as much as possible. See #startServices.<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  public HRegionServer(Configuration conf) throws IOException {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    super("RegionServer");  // thread name<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    TraceUtil.initTracer(conf);<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    try {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>      this.startcode = System.currentTimeMillis();<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      this.conf = conf;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>      this.fsOk = true;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      this.masterless = conf.getBoolean(MASTERLESS_CONFIG_NAME, false);<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      this.eventLoopGroupConfig = setupNetty(this.conf);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      MemorySizeUtil.checkForClusterFreeHeapMemoryLimit(this.conf);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      HFile.checkHFileVersion(this.conf);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      checkCodecs(this.conf);<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      this.userProvider = UserProvider.instantiate(conf);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>      FSUtils.setupShortCircuitRead(this.conf);<a name="line.546"></a>
+<span class="sourceLineNo">547</span><a name="line.547"></a>
+<span class="sourceLineNo">548</span>      // Disable usage of meta replicas in the regionserver<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      // Config'ed params<a name="line.550"></a>
+<span class="sourceLineNo">551</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.553"></a>
+<span class="sourceLineNo">554</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.554"></a>
 <span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>      // Disable usage of meta replicas in the regionserver<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      this.conf.setBoolean(HConstants.USE_META_REPLICAS, false);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      // Config'ed params<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      this.msgInterval = conf.getInt("hbase.regionserver.msginterval", 3 * 1000);<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.564"></a>
+<span class="sourceLineNo">556</span>      this.sleeper = new Sleeper(this.msgInterval, this);<a name="line.556"></a>
+<span class="sourceLineNo">557</span><a name="line.557"></a>
+<span class="sourceLineNo">558</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.561"></a>
+<span class="sourceLineNo">562</span><a name="line.562"></a>
+<span class="sourceLineNo">563</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.564"></a>
 <span class="sourceLineNo">565</span><a name="line.565"></a>
-<span class="sourceLineNo">566</span>      boolean isNoncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      this.nonceManager = isNoncesEnabled ? new ServerNonceManager(this.conf) : null;<a name="line.567"></a>
+<span class="sourceLineNo">566</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.566"></a>
+<span class="sourceLineNo">567</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.567"></a>
 <span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>      this.numRegionsToReport = conf.getInt("hbase.regionserver.numregionstoreport", 10);<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>      this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,<a name="line.571"></a>
-<span class="sourceLineNo">572</span>          HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      this.shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);<a name="line.575"></a>
-<span class="sourceLineNo">576</span><a name="line.576"></a>
-<span class="sourceLineNo">577</span>      this.abortRequested = false;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      this.stopped = false;<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      rpcServices = createRpcServices();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      String hostName =<a name="line.582"></a>
-<span class="sourceLineNo">583</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.583"></a>
-<span class="sourceLineNo">584</span>              : this.useThisHostnameInstead;<a name="line.584"></a>
-<span class="sourceLineNo">585</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>      // login the zookeeper client principal (if using security)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.591"></a>
-<span class="sourceLineNo">592</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.592"></a>
-<span class="sourceLineNo">593</span>      // login the server principal (if using secure Hadoop)<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      login(userProvider, hostName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
-<span class="sourceLineNo">599</span><a name="line.599"></a>
-<span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
-<span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
-<span class="sourceLineNo">608</span><a name="line.608"></a>
-<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
-<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span><a name="line.630"></a>
-<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
-<span class="sourceLineNo">633</span><a name="line.633"></a>
-<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
-<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
-<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
-<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
-<span class="sourceLineNo">661</span><a name="line.661"></a>
-<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
-<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
-<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
-<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
-<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
-<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
-<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
-<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
-<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
-<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
-<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
-<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
-<span class="sourceLineNo">723</span><a name="line.723"></a>
-<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span><a name="line.733"></a>
-<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
-<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
-<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
-<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
-<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">569</span>      this.abortRequested = false;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      this.stopped = false;<a name="line.570"></a>
+<span class="sourceLineNo">571</span><a name="line.571"></a>
+<span class="sourceLineNo">572</span>      rpcServices = createRpcServices();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      useThisHostnameInstead = getUseThisHostnameInstead(conf);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      String hostName =<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          StringUtils.isBlank(useThisHostnameInstead) ? this.rpcServices.isa.getHostName()<a name="line.575"></a>
+<span class="sourceLineNo">576</span>              : this.useThisHostnameInstead;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>      serverName = ServerName.valueOf(hostName, this.rpcServices.isa.getPort(), this.startcode);<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>      rpcControllerFactory = RpcControllerFactory.instantiate(this.conf);<a name="line.579"></a>
+<span class="sourceLineNo">580</span>      rpcRetryingCallerFactory = RpcRetryingCallerFactory.instantiate(this.conf);<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>      // login the zookeeper client principal (if using security)<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      ZKUtil.loginClient(this.conf, HConstants.ZK_CLIENT_KEYTAB_FILE,<a name="line.583"></a>
+<span class="sourceLineNo">584</span>          HConstants.ZK_CLIENT_KERBEROS_PRINCIPAL, hostName);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>      // login the server principal (if using secure Hadoop)<a name="line.585"></a>
+<span class="sourceLineNo">586</span>      login(userProvider, hostName);<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      // init superusers and add the server principal (if using security)<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      // or process owner as default super user.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      Superusers.initialize(conf);<a name="line.589"></a>
+<span class="sourceLineNo">590</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.590"></a>
+<span class="sourceLineNo">591</span><a name="line.591"></a>
+<span class="sourceLineNo">592</span>      boolean isMasterNotCarryTable =<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>      // no need to instantiate global block cache when master not carry table<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      if (!isMasterNotCarryTable) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      }<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      cacheConfig = new CacheConfig(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.599"></a>
+<span class="sourceLineNo">600</span><a name="line.600"></a>
+<span class="sourceLineNo">601</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>        @Override<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      };<a name="line.606"></a>
+<span class="sourceLineNo">607</span><a name="line.607"></a>
+<span class="sourceLineNo">608</span>      initializeFileSystem();<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.609"></a>
+<span class="sourceLineNo">610</span><a name="line.610"></a>
+<span class="sourceLineNo">611</span>      this.configurationManager = new ConfigurationManager();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.612"></a>
+<span class="sourceLineNo">613</span><a name="line.613"></a>
+<span class="sourceLineNo">614</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        // Open connection to zookeeper and set primary watcher<a name="line.616"></a>
+<span class="sourceLineNo">617</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        if (!this.masterless) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.621"></a>
+<span class="sourceLineNo">622</span><a name="line.622"></a>
+<span class="sourceLineNo">623</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          masterAddressTracker.start();<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          clusterStatusTracker.start();<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        } else {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          masterAddressTracker = null;<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          clusterStatusTracker = null;<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        }<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      } else {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>        zooKeeper = null;<a name="line.633"></a>
+<span class="sourceLineNo">634</span>        masterAddressTracker = null;<a name="line.634"></a>
+<span class="sourceLineNo">635</span>        clusterStatusTracker = null;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>      }<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      this.rpcServices.start(zooKeeper);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      // class HRS. TODO.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      this.choreService = new ChoreService(getName(), true);<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      this.executorService = new ExecutorService(getName());<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      putUpWebUI();<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    } catch (Throwable t) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // cause of failed startup is lost.<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      LOG.error("Failed construction RegionServer", t);<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      throw t;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>  }<a name="line.652"></a>
+<span class="sourceLineNo">653</span><a name="line.653"></a>
+<span class="sourceLineNo">654</span>  // HMaster should override this method to load the specific config for master<a name="line.654"></a>
+<span class="sourceLineNo">655</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.659"></a>
+<span class="sourceLineNo">660</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.660"></a>
+<span class="sourceLineNo">661</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.661"></a>
+<span class="sourceLineNo">662</span>        throw new IOException(msg);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>      } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>        return rpcServices.isa.getHostName();<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      return hostname;<a name="line.667"></a>
+<span class="sourceLineNo">668</span>    }<a name="line.668"></a>
+<span class="sourceLineNo">669</span>  }<a name="line.669"></a>
+<span class="sourceLineNo">670</span><a name="line.670"></a>
+<span class="sourceLineNo">671</span>  /**<a name="line.671"></a>
+<span class="sourceLineNo">672</span>   * If running on Windows, do windows-specific setup.<a name="line.672"></a>
+<span class="sourceLineNo">673</span>   */<a name="line.673"></a>
+<span class="sourceLineNo">674</span>  private static void setupWind

<TRUNCATED>
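
The truncated hunk above renumbers the generated source view of the RegionServer constructor: the code itself is unchanged, but every statement shifts up by eight lines (for example, the nonce check moves from source line 566 to 558) once the ZooKeeper-only imports are dropped from the file. The settings read there are ordinary Configuration lookups against HConstants keys. A minimal, illustrative sketch follows; it is not part of the commit, and the class name and standalone main are assumptions made only for this example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class TimeoutConfigExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Client-style operation timeout used for RPCs the server issues itself.
    int operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT,
        HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
    // Tighter timeout applied to short, latency-sensitive RPCs.
    int shortOperationTimeout = conf.getInt(HConstants.HBASE_RPC_SHORTOPERATION_TIMEOUT_KEY,
        HConstants.DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT);
    // Nonces guard non-idempotent operations (e.g. increments) against replays.
    boolean noncesEnabled = conf.getBoolean(HConstants.HBASE_RS_NONCES_ENABLED, true);
    System.out.println(operationTimeout + " " + shortOperationTimeout + " " + noncesEnabled);
  }
}

The three keys correspond to the hbase.client.operation.timeout, hbase.rpc.shortoperation.timeout and hbase.regionserver.nonces.enabled properties in hbase-site.xml.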

[48/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
index 4da24d26..25fe0d6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/ClusterConnection.html
@@ -120,10 +120,6 @@
 <td class="colFirst"><a href="#org.apache.hadoop.hbase.util">org.apache.hadoop.hbase.util</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.zookeeper">org.apache.hadoop.hbase.zookeeper</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
 </tbody>
 </table>
 </li>
@@ -794,49 +790,8 @@
 </tr>
 </tbody>
 </table>
-</li>
-<li class="blockList"><a name="org.apache.hadoop.hbase.util">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> in <a href="../../../../../../org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a> declared as <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></code></td>
-<td class="colLast"><span class="typeNameLabel">HBaseFsck.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#connection">connection</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></code></td>
-<td class="colLast"><span class="typeNameLabel">HBaseFsck.WorkItemRegion.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#connection">connection</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">
-<caption><span>Constructors in <a href="../../../../../../org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a> with parameters of type <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colOne" scope="col">Constructor and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#WorkItemRegion-org.apache.hadoop.hbase.util.HBaseFsck-org.apache.hadoop.hbase.ServerName-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-org.apache.hadoop.hbase.client.ClusterConnection-">WorkItemRegion</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
-              <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;info,
-              <a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
-              <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection)</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList"><a name="org.apache.hadoop.hbase.zookeeper">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> in <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/package-summary.html">org.apache.hadoop.hbase.zookeeper</a></h3>
 <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/package-summary.html">org.apache.hadoop.hbase.zookeeper</a> with parameters of type <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></span><span class="tabEnd">&nbsp;</span></caption>
+<caption><span>Methods in <a href="../../../../../../org/apache/hadoop/hbase/rsgroup/package-summary.html">org.apache.hadoop.hbase.rsgroup</a> with parameters of type <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
 <th class="colFirst" scope="col">Modifier and Type</th>
 <th class="colLast" scope="col">Method and Description</th>
@@ -844,12 +799,12 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
                    <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
                        <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                        long&nbsp;timeout,
                        int&nbsp;replicaId)</code>
@@ -858,16 +813,16 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>boolean</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
                         <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                         long&nbsp;timeout)</code>
 <div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>boolean</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
                         <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                         long&nbsp;timeout,
                         int&nbsp;replicaId)</code>
@@ -875,8 +830,8 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private boolean</code></td>
-<td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
+<td class="colFirst"><code>private static boolean</code></td>
+<td class="colLast"><span class="typeNameLabel">Utility.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/rsgroup/Utility.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
                     org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
                     <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
                     byte[]&nbsp;regionName)</code>
@@ -887,6 +842,42 @@
 </tbody>
 </table>
 </li>
+<li class="blockList"><a name="org.apache.hadoop.hbase.util">
+<!--   -->
+</a>
+<h3>Uses of <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> in <a href="../../../../../../org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a></h3>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
+<caption><span>Fields in <a href="../../../../../../org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a> declared as <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Field and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HBaseFsck.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html#connection">connection</a></span></code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></code></td>
+<td class="colLast"><span class="typeNameLabel">HBaseFsck.WorkItemRegion.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#connection">connection</a></span></code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">
+<caption><span>Constructors in <a href="../../../../../../org/apache/hadoop/hbase/util/package-summary.html">org.apache.hadoop.hbase.util</a> with parameters of type <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colOne" scope="col">Constructor and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#WorkItemRegion-org.apache.hadoop.hbase.util.HBaseFsck-org.apache.hadoop.hbase.ServerName-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter-org.apache.hadoop.hbase.client.ClusterConnection-">WorkItemRegion</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
+              <a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;info,
+              <a href="../../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
+              <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection)</code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+</li>
 </ul>
 </li>
 </ul>
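The rewritten class-use table above records that the meta-location verification helpers formerly listed under org.apache.hadoop.hbase.zookeeper.MetaTableLocator now appear as static methods on org.apache.hadoop.hbase.rsgroup.Utility. A rough usage sketch under that assumption follows; the connection and watcher setup are illustrative only, the cast to the internal ClusterConnection interface is shown purely for the example, and it assumes the static verifyMetaRegionLocation overload listed above is callable from the caller's package:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.rsgroup.Utility;   // hbase-rsgroup module
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public class VerifyMetaExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // ClusterConnection is an internal interface; the cast works because the
    // default connection implementation implements it.
    try (ClusterConnection conn =
             (ClusterConnection) ConnectionFactory.createConnection(conf);
         ZKWatcher zkw = new ZKWatcher(conf, "verify-meta-example", null)) {
      // Returns true once hbase:meta is deployed and reachable within the timeout.
      boolean ok = Utility.verifyMetaRegionLocation(conn, zkw, 60000L);
      System.out.println("hbase:meta reachable: " + ok);
    }
  }
}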

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
index 094abbe..4e44863 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionInfo.html
@@ -7046,28 +7046,28 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegions</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Gets the meta regions for the given path with the default replica ID.</div>
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegions</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
               int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions for the given path and replica ID.</div>
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                           int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
@@ -7083,7 +7083,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">MetaTableLocator.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</code>&nbsp;</td>
 </tr>
 </tbody>
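In the same vein, the hunk above records that MetaTableLocator's getMetaRegions and getMetaRegionsAndLocations helpers are now static. A short sketch of the static call style follows, assuming a reachable cluster and ZooKeeper quorum in the local configuration; the class name and main method are illustrative only:

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public class ListMetaRegionsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (ZKWatcher zkw = new ZKWatcher(conf, "list-meta-example", null)) {
      // Static invocation after this change; previously an instance method.
      List<Pair<RegionInfo, ServerName>> pairs =
          MetaTableLocator.getMetaRegionsAndLocations(zkw);
      for (Pair<RegionInfo, ServerName> p : pairs) {
        System.out.println(p.getFirst().getRegionNameAsString() + " on " + p.getSecond());
      }
    }
  }
}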

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index b99d270..27d24f7 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -555,24 +555,24 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/client/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-use.html b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
index 1d81f16..c609c43 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
@@ -3727,11 +3727,6 @@ service.</div>
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/client/class-use/ClusterConnection.html#org.apache.hadoop.hbase.zookeeper">ClusterConnection</a>
-<div class="block">Internal methods on Connection that should not be used by user code.</div>
-</td>
-</tr>
-<tr class="rowColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/client/class-use/RegionInfo.html#org.apache.hadoop.hbase.zookeeper">RegionInfo</a>
 <div class="block">Information about a region.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index ae0124e..ce950b0 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 72b13ad..9cda462 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -184,13 +184,13 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 454a067..ee33cbe 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -274,12 +274,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index df0adf0..c12ae36 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -354,8 +354,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 76f6d8f..b11f6ad 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -293,9 +293,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 </ul>
 </li>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html
index 53c524e..f69e20a 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.237">ZKUtil.JaasConfiguration</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.236">ZKUtil.JaasConfiguration</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/Configuration.html?is-external=true" title="class or interface in javax.security.auth.login">Configuration</a></pre>
 <div class="block">A JAAS configuration that defines the login modules that we want to use for login.</div>
 </li>
@@ -280,7 +280,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_KEYTAB_KERBEROS_CONFIG_NAME</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.238">SERVER_KEYTAB_KERBEROS_CONFIG_NAME</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.237">SERVER_KEYTAB_KERBEROS_CONFIG_NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.zookeeper.ZKUtil.JaasConfiguration.SERVER_KEYTAB_KERBEROS_CONFIG_NAME">Constant Field Values</a></dd>
@@ -293,7 +293,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>CLIENT_KEYTAB_KERBEROS_CONFIG_NAME</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.240">CLIENT_KEYTAB_KERBEROS_CONFIG_NAME</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.239">CLIENT_KEYTAB_KERBEROS_CONFIG_NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.zookeeper.ZKUtil.JaasConfiguration.CLIENT_KEYTAB_KERBEROS_CONFIG_NAME">Constant Field Values</a></dd>
@@ -306,7 +306,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>BASIC_JAAS_OPTIONS</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.243">BASIC_JAAS_OPTIONS</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.242">BASIC_JAAS_OPTIONS</a></pre>
 </li>
 </ul>
 <a name="KEYTAB_KERBEROS_OPTIONS">
@@ -315,7 +315,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>KEYTAB_KERBEROS_OPTIONS</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.251">KEYTAB_KERBEROS_OPTIONS</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.250">KEYTAB_KERBEROS_OPTIONS</a></pre>
 </li>
 </ul>
 <a name="KEYTAB_KERBEROS_LOGIN">
@@ -324,7 +324,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>KEYTAB_KERBEROS_LOGIN</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/AppConfigurationEntry.html?is-external=true" title="class or interface in javax.security.auth.login">AppConfigurationEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.259">KEYTAB_KERBEROS_LOGIN</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/AppConfigurationEntry.html?is-external=true" title="class or interface in javax.security.auth.login">AppConfigurationEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.258">KEYTAB_KERBEROS_LOGIN</a></pre>
 </li>
 </ul>
 <a name="KEYTAB_KERBEROS_CONF">
@@ -333,7 +333,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>KEYTAB_KERBEROS_CONF</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/AppConfigurationEntry.html?is-external=true" title="class or interface in javax.security.auth.login">AppConfigurationEntry</a>[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.264">KEYTAB_KERBEROS_CONF</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/AppConfigurationEntry.html?is-external=true" title="class or interface in javax.security.auth.login">AppConfigurationEntry</a>[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.263">KEYTAB_KERBEROS_CONF</a></pre>
 </li>
 </ul>
 <a name="baseConfig">
@@ -342,7 +342,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>baseConfig</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/Configuration.html?is-external=true" title="class or interface in javax.security.auth.login">Configuration</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.267">baseConfig</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/Configuration.html?is-external=true" title="class or interface in javax.security.auth.login">Configuration</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.266">baseConfig</a></pre>
 </li>
 </ul>
 <a name="loginContextName">
@@ -351,7 +351,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>loginContextName</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.268">loginContextName</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.267">loginContextName</a></pre>
 </li>
 </ul>
 <a name="useTicketCache">
@@ -360,7 +360,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>useTicketCache</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.269">useTicketCache</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.268">useTicketCache</a></pre>
 </li>
 </ul>
 <a name="keytabFile">
@@ -369,7 +369,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>keytabFile</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.270">keytabFile</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.269">keytabFile</a></pre>
 </li>
 </ul>
 <a name="principal">
@@ -378,7 +378,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockListLast">
 <li class="blockList">
 <h4>principal</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.271">principal</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.270">principal</a></pre>
 </li>
 </ul>
 </li>
@@ -395,7 +395,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockList">
 <li class="blockList">
 <h4>JaasConfiguration</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.273">JaasConfiguration</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;loginContextName,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.272">JaasConfiguration</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;loginContextName,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;keytabFile)</pre>
 </li>
@@ -406,7 +406,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockListLast">
 <li class="blockList">
 <h4>JaasConfiguration</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.277">JaasConfiguration</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;loginContextName,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.276">JaasConfiguration</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;loginContextName,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;keytabFile,
                           boolean&nbsp;useTicketCache)</pre>
@@ -426,7 +426,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/l
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getAppConfigurationEntry</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/AppConfigurationEntry.html?is-external=true" title="class or interface in javax.security.auth.login">AppConfigurationEntry</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.294">getAppConfigurationEntry</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;appName)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/AppConfigurationEntry.html?is-external=true" title="class or interface in javax.security.auth.login">AppConfigurationEntry</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.JaasConfiguration.html#line.293">getAppConfigurationEntry</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;appName)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/Configuration.html?is-external=true#getAppConfigurationEntry-java.lang.String-" title="class or interface in javax.security.auth.login">getAppConfigurationEntry</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/login/Configuration.html?is-external=true" title="class or interface in javax.security.auth.login">Configuration</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html
index c612f85..70831f4 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <br>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.514">ZKUtil.NodeAndData</a>
+public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.513">ZKUtil.NodeAndData</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Simple class to hold a node path and node data.</div>
 </li>
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>node</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.515">node</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.514">node</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 </li>
 </ul>
@@ -247,7 +247,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>data</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.516">data</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.515">data</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 </li>
 </ul>
@@ -265,7 +265,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>NodeAndData</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.517">NodeAndData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.516">NodeAndData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
                    byte[]&nbsp;data)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 </li>
@@ -284,7 +284,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNode</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.521">getNode</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.520">getNode</a>()</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 </li>
 </ul>
@@ -294,7 +294,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.524">getData</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.523">getData</a>()</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 </li>
 </ul>
@@ -304,7 +304,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.528">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.527">toString</a>()</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
@@ -318,7 +318,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isEmpty</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.531">isEmpty</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#line.530">isEmpty</a>()</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;</div>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html
index 7cd7c6d..6a90067 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1525">ZKUtil.ZKUtilOp.CreateAndFailSilent</a>
+<pre>public static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1524">ZKUtil.ZKUtilOp.CreateAndFailSilent</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a></pre>
 <div class="block">ZKUtilOp representing createAndFailSilent in ZooKeeper
  (attempt to create node, ignore error if already exists)</div>
@@ -241,7 +241,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>data</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1526">data</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1525">data</a></pre>
 </li>
 </ul>
 </li>
@@ -258,7 +258,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CreateAndFailSilent</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1528">CreateAndFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1527">CreateAndFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                             byte[]&nbsp;data)</pre>
 </li>
 </ul>
@@ -276,7 +276,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1533">getData</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1532">getData</a>()</pre>
 </li>
 </ul>
 <a name="equals-java.lang.Object-">
@@ -285,7 +285,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1538">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1537">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -298,7 +298,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1551">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.CreateAndFailSilent.html#line.1550">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html
index 08ac264..c95581c 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1561">ZKUtil.ZKUtilOp.DeleteNodeFailSilent</a>
+<pre>public static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1560">ZKUtil.ZKUtilOp.DeleteNodeFailSilent</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a></pre>
 <div class="block">ZKUtilOp representing deleteNodeFailSilent in ZooKeeper
  (attempt to delete node, ignore error if node doesn't exist)</div>
@@ -217,7 +217,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DeleteNodeFailSilent</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html#line.1562">DeleteNodeFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html#line.1561">DeleteNodeFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 </li>
 </ul>
 </li>
@@ -234,7 +234,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html#line.1567">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html#line.1566">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -247,7 +247,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html#line.1579">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.DeleteNodeFailSilent.html#line.1578">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html
index 1d90e4e..30e3773 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1587">ZKUtil.ZKUtilOp.SetData</a>
+<pre>public static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1586">ZKUtil.ZKUtilOp.SetData</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a></pre>
 <div class="block">ZKUtilOp representing setData in ZooKeeper</div>
 </li>
@@ -254,7 +254,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>data</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1588">data</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1587">data</a></pre>
 </li>
 </ul>
 <a name="version">
@@ -263,7 +263,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>version</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1589">version</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1588">version</a></pre>
 </li>
 </ul>
 </li>
@@ -280,7 +280,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>SetData</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1591">SetData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1590">SetData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                 byte[]&nbsp;data)</pre>
 </li>
 </ul>
@@ -290,7 +290,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SetData</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1596">SetData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1595">SetData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                 byte[]&nbsp;data,
                 int&nbsp;version)</pre>
 </li>
@@ -309,7 +309,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1602">getData</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1601">getData</a>()</pre>
 </li>
 </ul>
 <a name="getVersion--">
@@ -318,7 +318,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>getVersion</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1606">getVersion</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1605">getVersion</a>()</pre>
 </li>
 </ul>
 <a name="equals-java.lang.Object-">
@@ -327,7 +327,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1611">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1610">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -340,7 +340,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilO
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1625">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.SetData.html#line.1624">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
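
The SetData page above documents a plain value object: the two private constructors differ only in whether an explicit ZooKeeper version is carried along, and getData()/getVersion() hand the stored fields back. A minimal sketch of building and inspecting such an op through the public factory method, assuming only the ZKUtil.ZKUtilOp API shown on these pages; the Bytes helper, znode path and payload are illustrative and not taken from this diff:

  import org.apache.hadoop.hbase.util.Bytes;
  import org.apache.hadoop.hbase.zookeeper.ZKUtil;

  public class SetDataOpSketch {
    public static void main(String[] args) {
      // Build a setData op that should only apply if the znode is at version 3.
      ZKUtil.ZKUtilOp op =
          ZKUtil.ZKUtilOp.setData("/hbase/example", Bytes.toBytes("payload"), 3);

      // The op itself performs no ZooKeeper I/O; it only carries path, data and version.
      ZKUtil.ZKUtilOp.SetData setData = (ZKUtil.ZKUtilOp.SetData) op;
      System.out.println(op.getPath());                       // /hbase/example
      System.out.println(setData.getVersion());               // 3
      System.out.println(Bytes.toString(setData.getData()));  // payload
    }
  }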

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html
index 188d5d7..124ec0a 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public abstract static class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1479">ZKUtil.ZKUtilOp</a>
+<pre>public abstract static class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.html#line.1478">ZKUtil.ZKUtilOp</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Represents an action taken by ZKUtil, e.g. createAndFailSilent.
  These actions are higher-level than ZKOp actions, which represent
@@ -265,7 +265,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>path</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1480">path</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1479">path</a></pre>
 </li>
 </ul>
 </li>
@@ -282,7 +282,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ZKUtilOp</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1482">ZKUtilOp</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1481">ZKUtilOp</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 </li>
 </ul>
 </li>
@@ -299,7 +299,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createAndFailSilent</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1489">createAndFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1488">createAndFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                                                   byte[]&nbsp;data)</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -313,7 +313,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteNodeFailSilent</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1496">deleteNodeFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1495">deleteNodeFailSilent</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>a deleteNodeFailSilent ZKUtilOP</dd>
@@ -326,7 +326,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setData</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1503">setData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1502">setData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                                       byte[]&nbsp;data)</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -340,7 +340,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setData</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1510">setData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.ZKUtilOp</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1509">setData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path,
                                       byte[]&nbsp;data,
                                       int&nbsp;version)</pre>
 <dl>
@@ -355,7 +355,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1517">getPath</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/ZKUtil.ZKUtilOp.html#line.1516">getPath</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>path to znode where the ZKOp will occur</dd>
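
Taken together, the static factory methods documented above (createAndFailSilent, deleteNodeFailSilent and the two setData overloads) only describe intended ZooKeeper mutations; something else has to execute them. A hedged sketch of how a batch of such ops is typically applied, assuming an already-connected ZKWatcher, the Bytes helper for byte[] conversion, and ZKUtil.multiOrSequential as the executor (the latter lives in the same ZKUtil class but is outside the hunks shown here):

  import java.util.Arrays;
  import java.util.List;

  import org.apache.hadoop.hbase.util.Bytes;
  import org.apache.hadoop.hbase.zookeeper.ZKUtil;
  import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
  import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
  import org.apache.zookeeper.KeeperException;

  public class ZKUtilOpBatchSketch {
    /** Applies three higher-level ops in one shot against the given watcher. */
    static void applyOps(ZKWatcher zkw) throws KeeperException {
      List<ZKUtilOp> ops = Arrays.asList(
          // create the znode if missing; silently succeed if it already exists
          ZKUtilOp.createAndFailSilent("/hbase/example", Bytes.toBytes("v1")),
          // unconditionally overwrite its data (no version check)
          ZKUtilOp.setData("/hbase/example", Bytes.toBytes("v2")),
          // remove an obsolete sibling; silently succeed if it is already gone
          ZKUtilOp.deleteNodeFailSilent("/hbase/example-old"));
      // Submit as a single multi; fall back to one-by-one ops if the multi fails.
      ZKUtil.multiOrSequential(zkw, ops, true);
    }
  }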


[18/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
index a957d31..62f81b6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
@@ -142,5192 +142,5186 @@
 <span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
 <span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.135"></a>
 <span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.util.Tool;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.util.ToolRunner;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.zookeeper.KeeperException;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.slf4j.Logger;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.slf4j.LoggerFactory;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.161"></a>
-<span class="sourceLineNo">162</span><a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>/**<a name="line.166"></a>
-<span class="sourceLineNo">167</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.167"></a>
-<span class="sourceLineNo">168</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.168"></a>
-<span class="sourceLineNo">169</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.169"></a>
-<span class="sourceLineNo">170</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.170"></a>
-<span class="sourceLineNo">171</span> *<a name="line.171"></a>
-<span class="sourceLineNo">172</span> * &lt;p&gt;<a name="line.172"></a>
-<span class="sourceLineNo">173</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.173"></a>
-<span class="sourceLineNo">174</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.174"></a>
-<span class="sourceLineNo">175</span> * accordance.<a name="line.175"></a>
-<span class="sourceLineNo">176</span> * &lt;p&gt;<a name="line.176"></a>
-<span class="sourceLineNo">177</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.177"></a>
-<span class="sourceLineNo">178</span> * one region of a table.  This means there are no individual degenerate<a name="line.178"></a>
-<span class="sourceLineNo">179</span> * or backwards regions; no holes between regions; and that there are no<a name="line.179"></a>
-<span class="sourceLineNo">180</span> * overlapping regions.<a name="line.180"></a>
-<span class="sourceLineNo">181</span> * &lt;p&gt;<a name="line.181"></a>
-<span class="sourceLineNo">182</span> * The general repair strategy works in two phases:<a name="line.182"></a>
-<span class="sourceLineNo">183</span> * &lt;ol&gt;<a name="line.183"></a>
-<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.184"></a>
-<span class="sourceLineNo">185</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.185"></a>
-<span class="sourceLineNo">186</span> * &lt;/ol&gt;<a name="line.186"></a>
-<span class="sourceLineNo">187</span> * &lt;p&gt;<a name="line.187"></a>
-<span class="sourceLineNo">188</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.188"></a>
-<span class="sourceLineNo">189</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.189"></a>
-<span class="sourceLineNo">190</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.190"></a>
-<span class="sourceLineNo">191</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.191"></a>
-<span class="sourceLineNo">192</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.192"></a>
-<span class="sourceLineNo">193</span> * a new region is created and all data is merged into the new region.<a name="line.193"></a>
-<span class="sourceLineNo">194</span> * &lt;p&gt;<a name="line.194"></a>
-<span class="sourceLineNo">195</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.195"></a>
-<span class="sourceLineNo">196</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.196"></a>
-<span class="sourceLineNo">197</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.197"></a>
-<span class="sourceLineNo">198</span> * an offline fashion.<a name="line.198"></a>
-<span class="sourceLineNo">199</span> * &lt;p&gt;<a name="line.199"></a>
-<span class="sourceLineNo">200</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.200"></a>
-<span class="sourceLineNo">201</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.201"></a>
-<span class="sourceLineNo">202</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.202"></a>
-<span class="sourceLineNo">203</span> * with proper state in the master.<a name="line.203"></a>
-<span class="sourceLineNo">204</span> * &lt;p&gt;<a name="line.204"></a>
-<span class="sourceLineNo">205</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.205"></a>
-<span class="sourceLineNo">206</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.206"></a>
-<span class="sourceLineNo">207</span> * first be called successfully.  Much of the region consistency information<a name="line.207"></a>
-<span class="sourceLineNo">208</span> * is transient and less risky to repair.<a name="line.208"></a>
-<span class="sourceLineNo">209</span> * &lt;p&gt;<a name="line.209"></a>
-<span class="sourceLineNo">210</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.210"></a>
-<span class="sourceLineNo">211</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.211"></a>
-<span class="sourceLineNo">212</span> * {@link #printUsageAndExit()} for more details.<a name="line.212"></a>
-<span class="sourceLineNo">213</span> */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.214"></a>
-<span class="sourceLineNo">215</span>@InterfaceStability.Evolving<a name="line.215"></a>
-<span class="sourceLineNo">216</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static boolean rsSupportsOffline = true;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.222"></a>
-<span class="sourceLineNo">223</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  /**<a name="line.224"></a>
-<span class="sourceLineNo">225</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   */<a name="line.227"></a>
-<span class="sourceLineNo">228</span>  @VisibleForTesting<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.229"></a>
-<span class="sourceLineNo">230</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.230"></a>
-<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.231"></a>
-<span class="sourceLineNo">232</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.234"></a>
-<span class="sourceLineNo">235</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.236"></a>
-<span class="sourceLineNo">237</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.240"></a>
-<span class="sourceLineNo">241</span><a name="line.241"></a>
-<span class="sourceLineNo">242</span>  /**********************<a name="line.242"></a>
-<span class="sourceLineNo">243</span>   * Internal resources<a name="line.243"></a>
-<span class="sourceLineNo">244</span>   **********************/<a name="line.244"></a>
-<span class="sourceLineNo">245</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>  private ClusterMetrics status;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>  private ClusterConnection connection;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private Admin admin;<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  private Table meta;<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.250"></a>
-<span class="sourceLineNo">251</span>  protected ExecutorService executor;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  private HFileCorruptionChecker hfcc;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>  private int retcode = 0;<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  private Path HBCK_LOCK_PATH;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>  private FSDataOutputStream hbckOutFd;<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.258"></a>
-<span class="sourceLineNo">259</span>  // successful<a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>  // Unsupported options in HBase 2.0+<a name="line.262"></a>
-<span class="sourceLineNo">263</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  /***********<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * Options<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   ***********/<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  private static boolean details = false; // do we display the full report<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.274"></a>
-<span class="sourceLineNo">275</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.276"></a>
-<span class="sourceLineNo">277</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.277"></a>
-<span class="sourceLineNo">278</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.278"></a>
-<span class="sourceLineNo">279</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.280"></a>
-<span class="sourceLineNo">281</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.282"></a>
-<span class="sourceLineNo">283</span>  private boolean removeParents = false; // remove split parents<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  // hbase:meta are always checked<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  private TableName cleanReplicationBarrierTable;<a name="line.294"></a>
-<span class="sourceLineNo">295</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  // maximum number of overlapping regions to sideline<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Path sidelineDir = null;<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private static boolean summary = false; // if we want to print less output<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private boolean checkMetaOnly = false;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  private boolean checkRegionBoundaries = false;<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  /*********<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * State<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   *********/<a name="line.309"></a>
-<span class="sourceLineNo">310</span>  final private ErrorReporter errors;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  int fixes = 0;<a name="line.311"></a>
-<span class="sourceLineNo">312</span><a name="line.312"></a>
-<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
-<span class="sourceLineNo">314</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.314"></a>
-<span class="sourceLineNo">315</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.315"></a>
-<span class="sourceLineNo">316</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>   */<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.318"></a>
-<span class="sourceLineNo">319</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * to prevent dupes.<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   *<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   * the meta table<a name="line.330"></a>
-<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  /**<a name="line.334"></a>
-<span class="sourceLineNo">335</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.335"></a>
-<span class="sourceLineNo">336</span>   */<a name="line.336"></a>
-<span class="sourceLineNo">337</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.337"></a>
-<span class="sourceLineNo">338</span><a name="line.338"></a>
-<span class="sourceLineNo">339</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.339"></a>
-<span class="sourceLineNo">340</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>  private ZKWatcher zkw = null;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>  private String hbckEphemeralNodePath = null;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>  private boolean hbckZodeCreated = false;<a name="line.348"></a>
-<span class="sourceLineNo">349</span><a name="line.349"></a>
-<span class="sourceLineNo">350</span>  /**<a name="line.350"></a>
-<span class="sourceLineNo">351</span>   * Constructor<a name="line.351"></a>
-<span class="sourceLineNo">352</span>   *<a name="line.352"></a>
-<span class="sourceLineNo">353</span>   * @param conf Configuration object<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * @throws MasterNotRunningException if the master is not running<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   */<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    this(conf, createThreadPool(conf));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>  }<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
-<span class="sourceLineNo">361</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Constructor<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   *<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * @param conf<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   *          Configuration object<a name="line.370"></a>
-<span class="sourceLineNo">371</span>   * @throws MasterNotRunningException<a name="line.371"></a>
-<span class="sourceLineNo">372</span>   *           if the master is not running<a name="line.372"></a>
-<span class="sourceLineNo">373</span>   * @throws ZooKeeperConnectionException<a name="line.373"></a>
-<span class="sourceLineNo">374</span>   *           if unable to connect to ZooKeeper<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   */<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    super(conf);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    errors = getErrorReporter(getConf());<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    this.executor = exec;<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    zkw = createZooKeeperWatcher();<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  }<a name="line.384"></a>
-<span class="sourceLineNo">385</span><a name="line.385"></a>
-<span class="sourceLineNo">386</span>  /**<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    return new RetryCounterFactory(<a name="line.390"></a>
-<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.391"></a>
-<span class="sourceLineNo">392</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.392"></a>
-<span class="sourceLineNo">393</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.394"></a>
-<span class="sourceLineNo">395</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    return new RetryCounterFactory(<a name="line.402"></a>
-<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.403"></a>
-<span class="sourceLineNo">404</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.404"></a>
-<span class="sourceLineNo">405</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.406"></a>
-<span class="sourceLineNo">407</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.407"></a>
-<span class="sourceLineNo">408</span>  }<a name="line.408"></a>
-<span class="sourceLineNo">409</span><a name="line.409"></a>
-<span class="sourceLineNo">410</span>  /**<a name="line.410"></a>
-<span class="sourceLineNo">411</span>   * @return Return the tmp dir this tool writes too.<a name="line.411"></a>
-<span class="sourceLineNo">412</span>   */<a name="line.412"></a>
-<span class="sourceLineNo">413</span>  @VisibleForTesting<a name="line.413"></a>
-<span class="sourceLineNo">414</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
-<span class="sourceLineNo">417</span><a name="line.417"></a>
-<span class="sourceLineNo">418</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    RetryCounter retryCounter;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    private final Configuration conf;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    private Path hbckLockPath = null;<a name="line.421"></a>
-<span class="sourceLineNo">422</span><a name="line.422"></a>
-<span class="sourceLineNo">423</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      this.retryCounter = retryCounter;<a name="line.424"></a>
-<span class="sourceLineNo">425</span>      this.conf = conf;<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    }<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>    /**<a name="line.428"></a>
-<span class="sourceLineNo">429</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.429"></a>
-<span class="sourceLineNo">430</span>     */<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    Path getHbckLockPath() {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      return this.hbckLockPath;<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    }<a name="line.433"></a>
-<span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    public FSDataOutputStream call() throws IOException {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      try {<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tmpDir = getTmpDir(conf);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        fs.mkdirs(tmpDir);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.446"></a>
-<span class="sourceLineNo">447</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.447"></a>
-<span class="sourceLineNo">448</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.448"></a>
-<span class="sourceLineNo">449</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        out.flush();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return out;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      } catch(RemoteException e) {<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          return null;<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        } else {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          throw e;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.462"></a>
-<span class="sourceLineNo">463</span>        throws IOException {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      IOException exception = null;<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      do {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        } catch (IOException ioe) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.469"></a>
-<span class="sourceLineNo">470</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.470"></a>
-<span class="sourceLineNo">471</span>              + retryCounter.getMaxAttempts());<a name="line.471"></a>
-<span class="sourceLineNo">472</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.472"></a>
-<span class="sourceLineNo">473</span>              ioe);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>          try {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>            exception = ioe;<a name="line.475"></a>
-<span class="sourceLineNo">476</span>            retryCounter.sleepUntilNextRetry();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          } catch (InterruptedException ie) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.478"></a>
-<span class="sourceLineNo">479</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.479"></a>
-<span class="sourceLineNo">480</span>            .initCause(ie);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          }<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      } while (retryCounter.shouldRetry());<a name="line.483"></a>
-<span class="sourceLineNo">484</span><a name="line.484"></a>
-<span class="sourceLineNo">485</span>      throw exception;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  /**<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.490"></a>
-<span class="sourceLineNo">491</span>   *<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * @throws IOException if IO failure occurs<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      RetryCounter retryCounter) throws IOException {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    executor.execute(futureTask);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    final int timeoutInSeconds = conf.getInt(<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    FSDataOutputStream stream = null;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    try {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    } catch (ExecutionException ee) {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    } catch (InterruptedException ie) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      Thread.currentThread().interrupt();<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    } catch (TimeoutException exception) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>      // took too long to obtain lock<a name="line.512"></a>
-<span class="sourceLineNo">513</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      futureTask.cancel(true);<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    } finally {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      executor.shutdownNow();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>    }<a name="line.517"></a>
-<span class="sourceLineNo">518</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
-<span class="sourceLineNo">520</span><a name="line.520"></a>
-<span class="sourceLineNo">521</span>  private void unlockHbck() {<a name="line.521"></a>
-<span class="sourceLineNo">522</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      do {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>        try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          FSUtils.delete(FSUtils.getCurrentFileSystem(getConf()), HBCK_LOCK_PATH, true);<a name="line.527"></a>
-<span class="sourceLineNo">528</span>          LOG.info("Finishing hbck");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          return;<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        } catch (IOException ioe) {<a name="line.530"></a>
-<span class="sourceLineNo">531</span>          LOG.info("Failed to delete " + HBCK_LOCK_PATH + ", try="<a name="line.531"></a>
-<span class="sourceLineNo">532</span>              + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.532"></a>
-<span class="sourceLineNo">533</span>              + retryCounter.getMaxAttempts());<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          LOG.debug("Failed to delete " + HBCK_LOCK_PATH, ioe);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          try {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>            retryCounter.sleepUntilNextRetry();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>          } catch (InterruptedException ie) {<a name="line.537"></a>
-<span class="sourceLineNo">538</span>            Thread.currentThread().interrupt();<a name="line.538"></a>
-<span class="sourceLineNo">539</span>            LOG.warn("Interrupted while deleting lock file" +<a name="line.539"></a>
-<span class="sourceLineNo">540</span>                HBCK_LOCK_PATH);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>            return;<a name="line.541"></a>
-<span class="sourceLineNo">542</span>          }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>        }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      } while (retryCounter.shouldRetry());<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  /**<a name="line.548"></a>
-<span class="sourceLineNo">549</span>   * To repair region consistency, one must call connect() in order to repair<a name="line.549"></a>
-<span class="sourceLineNo">550</span>   * online state.<a name="line.550"></a>
-<span class="sourceLineNo">551</span>   */<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  public void connect() throws IOException {<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>    if (isExclusive()) {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      // Grab the lock<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      Pair&lt;Path, FSDataOutputStream&gt; pair =<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          checkAndMarkRunningHbck(getConf(), this.lockFileRetryCounterFactory.create());<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      HBCK_LOCK_PATH = pair.getFirst();<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      this.hbckOutFd = pair.getSecond();<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      if (hbckOutFd == null) {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        setRetCode(-1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        LOG.error("Another instance of hbck is fixing HBase, exiting this instance. " +<a name="line.562"></a>
-<span class="sourceLineNo">563</span>            "[If you are sure no other instance is running, delete the lock file " +<a name="line.563"></a>
-<span class="sourceLineNo">564</span>            HBCK_LOCK_PATH + " and rerun the tool]");<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw new IOException("Duplicate hbck - Abort");<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span><a name="line.567"></a>
-<span class="sourceLineNo">568</span>      // Make sure to cleanup the lock<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      hbckLockCleanup.set(true);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    }<a name="line.570"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.security.UserGroupInformation;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.util.ReflectionUtils;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.util.Tool;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.util.ToolRunner;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.yetus.audience.InterfaceStability;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.zookeeper.KeeperException;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.slf4j.Logger;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.slf4j.LoggerFactory;<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hbase.thirdparty.com.google.common.base.Joiner;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hbase.thirdparty.com.google.common.collect.Ordering;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hbase.thirdparty.com.google.common.collect.Sets;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hbase.thirdparty.com.google.common.collect.TreeMultimap;<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>/**<a name="line.165"></a>
+<span class="sourceLineNo">166</span> * HBaseFsck (hbck) is a tool for checking and repairing region consistency and<a name="line.166"></a>
+<span class="sourceLineNo">167</span> * table integrity problems in a corrupted HBase. This tool was written for hbase-1.x. It does not<a name="line.167"></a>
+<span class="sourceLineNo">168</span> * work with hbase-2.x; it can read state but is not allowed to change state; i.e. effect 'repair'.<a name="line.168"></a>
+<span class="sourceLineNo">169</span> * See hbck2 (HBASE-19121) for a hbck tool for hbase2.<a name="line.169"></a>
+<span class="sourceLineNo">170</span> *<a name="line.170"></a>
+<span class="sourceLineNo">171</span> * &lt;p&gt;<a name="line.171"></a>
+<span class="sourceLineNo">172</span> * Region consistency checks verify that hbase:meta, region deployment on region<a name="line.172"></a>
+<span class="sourceLineNo">173</span> * servers and the state of data in HDFS (.regioninfo files) all are in<a name="line.173"></a>
+<span class="sourceLineNo">174</span> * accordance.<a name="line.174"></a>
+<span class="sourceLineNo">175</span> * &lt;p&gt;<a name="line.175"></a>
+<span class="sourceLineNo">176</span> * Table integrity checks verify that all possible row keys resolve to exactly<a name="line.176"></a>
+<span class="sourceLineNo">177</span> * one region of a table.  This means there are no individual degenerate<a name="line.177"></a>
+<span class="sourceLineNo">178</span> * or backwards regions; no holes between regions; and that there are no<a name="line.178"></a>
+<span class="sourceLineNo">179</span> * overlapping regions.<a name="line.179"></a>
+<span class="sourceLineNo">180</span> * &lt;p&gt;<a name="line.180"></a>
+<span class="sourceLineNo">181</span> * The general repair strategy works in two phases:<a name="line.181"></a>
+<span class="sourceLineNo">182</span> * &lt;ol&gt;<a name="line.182"></a>
+<span class="sourceLineNo">183</span> * &lt;li&gt; Repair Table Integrity on HDFS. (merge or fabricate regions)<a name="line.183"></a>
+<span class="sourceLineNo">184</span> * &lt;li&gt; Repair Region Consistency with hbase:meta and assignments<a name="line.184"></a>
+<span class="sourceLineNo">185</span> * &lt;/ol&gt;<a name="line.185"></a>
+<span class="sourceLineNo">186</span> * &lt;p&gt;<a name="line.186"></a>
+<span class="sourceLineNo">187</span> * For table integrity repairs, the tables' region directories are scanned<a name="line.187"></a>
+<span class="sourceLineNo">188</span> * for .regioninfo files.  Each table's integrity is then verified.  If there<a name="line.188"></a>
+<span class="sourceLineNo">189</span> * are any orphan regions (regions with no .regioninfo files) or holes, new<a name="line.189"></a>
+<span class="sourceLineNo">190</span> * regions are fabricated.  Backwards regions are sidelined as well as empty<a name="line.190"></a>
+<span class="sourceLineNo">191</span> * degenerate (endkey==startkey) regions.  If there are any overlapping regions,<a name="line.191"></a>
+<span class="sourceLineNo">192</span> * a new region is created and all data is merged into the new region.<a name="line.192"></a>
+<span class="sourceLineNo">193</span> * &lt;p&gt;<a name="line.193"></a>
+<span class="sourceLineNo">194</span> * Table integrity repairs deal solely with HDFS and could potentially be done<a name="line.194"></a>
+<span class="sourceLineNo">195</span> * offline -- the hbase region servers or master do not need to be running.<a name="line.195"></a>
+<span class="sourceLineNo">196</span> * This phase can eventually be used to completely reconstruct the hbase:meta table in<a name="line.196"></a>
+<span class="sourceLineNo">197</span> * an offline fashion.<a name="line.197"></a>
+<span class="sourceLineNo">198</span> * &lt;p&gt;<a name="line.198"></a>
+<span class="sourceLineNo">199</span> * Region consistency requires three conditions -- 1) valid .regioninfo file<a name="line.199"></a>
+<span class="sourceLineNo">200</span> * present in an HDFS region dir,  2) valid row with .regioninfo data in META,<a name="line.200"></a>
+<span class="sourceLineNo">201</span> * and 3) a region is deployed only at the regionserver that was assigned to<a name="line.201"></a>
+<span class="sourceLineNo">202</span> * with proper state in the master.<a name="line.202"></a>
+<span class="sourceLineNo">203</span> * &lt;p&gt;<a name="line.203"></a>
+<span class="sourceLineNo">204</span> * Region consistency repairs require hbase to be online so that hbck can<a name="line.204"></a>
+<span class="sourceLineNo">205</span> * contact the HBase master and region servers.  The hbck#connect() method must<a name="line.205"></a>
+<span class="sourceLineNo">206</span> * first be called successfully.  Much of the region consistency information<a name="line.206"></a>
+<span class="sourceLineNo">207</span> * is transient and less risky to repair.<a name="line.207"></a>
+<span class="sourceLineNo">208</span> * &lt;p&gt;<a name="line.208"></a>
+<span class="sourceLineNo">209</span> * If hbck is run from the command line, there are a handful of arguments that<a name="line.209"></a>
+<span class="sourceLineNo">210</span> * can be used to limit the kinds of repairs hbck will do.  See the code in<a name="line.210"></a>
+<span class="sourceLineNo">211</span> * {@link #printUsageAndExit()} for more details.<a name="line.211"></a>
+<span class="sourceLineNo">212</span> */<a name="line.212"></a>
+<span class="sourceLineNo">213</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>@InterfaceStability.Evolving<a name="line.214"></a>
+<span class="sourceLineNo">215</span>public class HBaseFsck extends Configured implements Closeable {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  public static final long DEFAULT_TIME_LAG = 60000; // default value of 1 minute<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public static final long DEFAULT_SLEEP_BEFORE_RERUN = 10000;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private static final int MAX_NUM_THREADS = 50; // #threads to contact regions<a name="line.218"></a>
+<span class="sourceLineNo">219</span>  private static boolean rsSupportsOffline = true;<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  private static final int DEFAULT_OVERLAPS_TO_SIDELINE = 2;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  private static final int DEFAULT_MAX_MERGE = 5;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  private static final String TO_BE_LOADED = "to_be_loaded";<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  /**<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   * Here is where hbase-1.x used to default the lock for hbck1.<a name="line.224"></a>
+<span class="sourceLineNo">225</span>   * It puts in place a lock when it goes to write/make changes.<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   */<a name="line.226"></a>
+<span class="sourceLineNo">227</span>  @VisibleForTesting<a name="line.227"></a>
+<span class="sourceLineNo">228</span>  public static final String HBCK_LOCK_FILE = "hbase-hbck.lock";<a name="line.228"></a>
+<span class="sourceLineNo">229</span>  private static final int DEFAULT_MAX_LOCK_FILE_ATTEMPTS = 5;<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.230"></a>
+<span class="sourceLineNo">231</span>  private static final int DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  // We have to set the timeout value &gt; HdfsConstants.LEASE_SOFTLIMIT_PERIOD.<a name="line.232"></a>
+<span class="sourceLineNo">233</span>  // In HADOOP-2.6 and later, the Namenode proxy now created with custom RetryPolicy for<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  // AlreadyBeingCreatedException which is implies timeout on this operations up to<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  // HdfsConstants.LEASE_SOFTLIMIT_PERIOD (60 seconds).<a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static final int DEFAULT_WAIT_FOR_LOCK_TIMEOUT = 80; // seconds<a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private static final int DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS = 5;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL = 200; // milliseconds<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  private static final int DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME = 5000; // milliseconds<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  /**********************<a name="line.241"></a>
+<span class="sourceLineNo">242</span>   * Internal resources<a name="line.242"></a>
+<span class="sourceLineNo">243</span>   **********************/<a name="line.243"></a>
+<span class="sourceLineNo">244</span>  private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>  private ClusterMetrics status;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  private ClusterConnection connection;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>  private Admin admin;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private Table meta;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  // threads to do ||izable tasks: retrieve data from regionservers, handle overlapping regions<a name="line.249"></a>
+<span class="sourceLineNo">250</span>  protected ExecutorService executor;<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  private long startMillis = EnvironmentEdgeManager.currentTime();<a name="line.251"></a>
+<span class="sourceLineNo">252</span>  private HFileCorruptionChecker hfcc;<a name="line.252"></a>
+<span class="sourceLineNo">253</span>  private int retcode = 0;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  private Path HBCK_LOCK_PATH;<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  private FSDataOutputStream hbckOutFd;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  // This lock is to prevent cleanup of balancer resources twice between<a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // ShutdownHook and the main code. We cleanup only if the connect() is<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  // successful<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  private final AtomicBoolean hbckLockCleanup = new AtomicBoolean(false);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  // Unsupported options in HBase 2.0+<a name="line.261"></a>
+<span class="sourceLineNo">262</span>  private static final Set&lt;String&gt; unsupportedOptionsInV2 = Sets.newHashSet("-fix",<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      "-fixAssignments", "-fixMeta", "-fixHdfsHoles", "-fixHdfsOrphans", "-fixTableOrphans",<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      "-fixHdfsOverlaps", "-sidelineBigOverlaps", "-fixSplitParents", "-removeParents",<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      "-fixEmptyMetaCells", "-repair", "-repairHoles", "-maxOverlapsToSideline", "-maxMerge");<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /***********<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Options<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   ***********/<a name="line.269"></a>
+<span class="sourceLineNo">270</span>  private static boolean details = false; // do we display the full report<a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private long timelag = DEFAULT_TIME_LAG; // tables whose modtime is older<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  private static boolean forceExclusive = false; // only this hbck can modify HBase<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  private boolean fixAssignments = false; // fix assignment errors?<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  private boolean fixMeta = false; // fix meta errors?<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  private boolean checkHdfs = true; // load and check fs consistency?<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  private boolean fixHdfsHoles = false; // fix fs holes?<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private boolean fixHdfsOverlaps = false; // fix fs overlaps (risky)<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  private boolean fixHdfsOrphans = false; // fix fs holes (missing .regioninfo)<a name="line.278"></a>
+<span class="sourceLineNo">279</span>  private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)<a name="line.279"></a>
+<span class="sourceLineNo">280</span>  private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs<a name="line.280"></a>
+<span class="sourceLineNo">281</span>  private boolean fixSplitParents = false; // fix lingering split parents<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  private boolean removeParents = false; // remove split parents<a name="line.282"></a>
+<span class="sourceLineNo">283</span>  private boolean fixReferenceFiles = false; // fix lingering reference store file<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean fixHFileLinks = false; // fix lingering HFileLinks<a name="line.284"></a>
+<span class="sourceLineNo">285</span>  private boolean fixEmptyMetaCells = false; // fix (remove) empty REGIONINFO_QUALIFIER rows<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  private boolean fixReplication = false; // fix undeleted replication queues for removed peer<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  private boolean cleanReplicationBarrier = false; // clean replication barriers of a table<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean fixAny = false; // Set to true if any of the fix is required.<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  // limit checking/fixes to listed tables, if empty attempt to check/fix all<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  // hbase:meta are always checked<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private Set&lt;TableName&gt; tablesIncluded = new HashSet&lt;&gt;();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private TableName cleanReplicationBarrierTable;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private int maxMerge = DEFAULT_MAX_MERGE; // maximum number of overlapping regions to merge<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  // maximum number of overlapping regions to sideline<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  private int maxOverlapsToSideline = DEFAULT_OVERLAPS_TO_SIDELINE;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>  private boolean sidelineBigOverlaps = false; // sideline overlaps with &gt;maxMerge regions<a name="line.297"></a>
+<span class="sourceLineNo">298</span>  private Path sidelineDir = null;<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>  private boolean rerun = false; // if we tried to fix something, rerun hbck<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private static boolean summary = false; // if we want to print less output<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  private boolean checkMetaOnly = false;<a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private boolean checkRegionBoundaries = false;<a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private boolean ignorePreCheckPermission = false; // if pre-check permission<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>  /*********<a name="line.306"></a>
+<span class="sourceLineNo">307</span>   * State<a name="line.307"></a>
+<span class="sourceLineNo">308</span>   *********/<a name="line.308"></a>
+<span class="sourceLineNo">309</span>  final private ErrorReporter errors;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  int fixes = 0;<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * This map contains the state of all hbck items.  It maps from encoded region<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * name to HbckInfo structure.  The information contained in HbckInfo is used<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * to detect and correct consistency (hdfs/meta/deployment) problems.<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   */<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  private TreeMap&lt;String, HbckInfo&gt; regionInfoMap = new TreeMap&lt;&gt;();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  // Empty regioninfo qualifiers in hbase:meta<a name="line.318"></a>
+<span class="sourceLineNo">319</span>  private Set&lt;Result&gt; emptyRegionInfoQualifiers = new HashSet&lt;&gt;();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>  /**<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * This map from Tablename -&gt; TableInfo contains the structures necessary to<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * detect table consistency problems (holes, dupes, overlaps).  It is sorted<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * to prevent dupes.<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   *<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * If tablesIncluded is empty, this map contains all tables.<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * Otherwise, it contains only meta tables and tables in tablesIncluded,<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * unless checkMetaOnly is specified, in which case, it contains only<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * the meta table<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
+<span class="sourceLineNo">331</span>  private SortedMap&lt;TableName, TableInfo&gt; tablesInfo = new ConcurrentSkipListMap&lt;&gt;();<a name="line.331"></a>
+<span class="sourceLineNo">332</span><a name="line.332"></a>
+<span class="sourceLineNo">333</span>  /**<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * When initially looking at HDFS, we attempt to find any orphaned data.<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   */<a name="line.335"></a>
+<span class="sourceLineNo">336</span>  private List&lt;HbckInfo&gt; orphanHdfsDirs = Collections.synchronizedList(new ArrayList&lt;HbckInfo&gt;());<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; orphanTableDirs = new HashMap&lt;&gt;();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>  private Map&lt;TableName, TableState&gt; tableStates = new HashMap&lt;&gt;();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  private final RetryCounterFactory lockFileRetryCounterFactory;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>  private final RetryCounterFactory createZNodeRetryCounterFactory;<a name="line.341"></a>
+<span class="sourceLineNo">342</span><a name="line.342"></a>
+<span class="sourceLineNo">343</span>  private Map&lt;TableName, Set&lt;String&gt;&gt; skippedRegions = new HashMap&lt;&gt;();<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>  private ZKWatcher zkw = null;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  private String hbckEphemeralNodePath = null;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>  private boolean hbckZodeCreated = false;<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>  /**<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * Constructor<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   *<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * @param conf Configuration object<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * @throws MasterNotRunningException if the master is not running<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * @throws ZooKeeperConnectionException if unable to connect to ZooKeeper<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   */<a name="line.355"></a>
+<span class="sourceLineNo">356</span>  public HBaseFsck(Configuration conf) throws IOException, ClassNotFoundException {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    this(conf, createThreadPool(conf));<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  private static ExecutorService createThreadPool(Configuration conf) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    int numThreads = conf.getInt("hbasefsck.numthreads", MAX_NUM_THREADS);<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    return new ScheduledThreadPoolExecutor(numThreads, Threads.newDaemonThreadFactory("hbasefsck"));<a name="line.362"></a>
+<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
+<span class="sourceLineNo">364</span><a name="line.364"></a>
+<span class="sourceLineNo">365</span>  /**<a name="line.365"></a>
+<span class="sourceLineNo">366</span>   * Constructor<a name="line.366"></a>
+<span class="sourceLineNo">367</span>   *<a name="line.367"></a>
+<span class="sourceLineNo">368</span>   * @param conf<a name="line.368"></a>
+<span class="sourceLineNo">369</span>   *          Configuration object<a name="line.369"></a>
+<span class="sourceLineNo">370</span>   * @throws MasterNotRunningException<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   *           if the master is not running<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @throws ZooKeeperConnectionException<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *           if unable to connect to ZooKeeper<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  public HBaseFsck(Configuration conf, ExecutorService exec) throws MasterNotRunningException,<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      ZooKeeperConnectionException, IOException, ClassNotFoundException {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    super(conf);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    errors = getErrorReporter(getConf());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    this.executor = exec;<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    lockFileRetryCounterFactory = createLockRetryCounterFactory(getConf());<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    createZNodeRetryCounterFactory = createZnodeRetryCounterFactory(getConf());<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    zkw = createZooKeeperWatcher();<a name="line.382"></a>
+<span class="sourceLineNo">383</span>  }<a name="line.383"></a>
+<span class="sourceLineNo">384</span><a name="line.384"></a>
+<span class="sourceLineNo">385</span>  /**<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   * @return A retry counter factory configured for retrying lock file creation.<a name="line.386"></a>
+<span class="sourceLineNo">387</span>   */<a name="line.387"></a>
+<span class="sourceLineNo">388</span>  public static RetryCounterFactory createLockRetryCounterFactory(Configuration conf) {<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    return new RetryCounterFactory(<a name="line.389"></a>
+<span class="sourceLineNo">390</span>        conf.getInt("hbase.hbck.lockfile.attempts", DEFAULT_MAX_LOCK_FILE_ATTEMPTS),<a name="line.390"></a>
+<span class="sourceLineNo">391</span>        conf.getInt("hbase.hbck.lockfile.attempt.sleep.interval",<a name="line.391"></a>
+<span class="sourceLineNo">392</span>            DEFAULT_LOCK_FILE_ATTEMPT_SLEEP_INTERVAL),<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        conf.getInt("hbase.hbck.lockfile.attempt.maxsleeptime",<a name="line.393"></a>
+<span class="sourceLineNo">394</span>            DEFAULT_LOCK_FILE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.394"></a>
+<span class="sourceLineNo">395</span>  }<a name="line.395"></a>
+<span class="sourceLineNo">396</span><a name="line.396"></a>
+<span class="sourceLineNo">397</span>  /**<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   * @return A retry counter factory configured for retrying znode creation.<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  private static RetryCounterFactory createZnodeRetryCounterFactory(Configuration conf) {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return new RetryCounterFactory(<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        conf.getInt("hbase.hbck.createznode.attempts", DEFAULT_MAX_CREATE_ZNODE_ATTEMPTS),<a name="line.402"></a>
+<span class="sourceLineNo">403</span>        conf.getInt("hbase.hbck.createznode.attempt.sleep.interval",<a name="line.403"></a>
+<span class="sourceLineNo">404</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_SLEEP_INTERVAL),<a name="line.404"></a>
+<span class="sourceLineNo">405</span>        conf.getInt("hbase.hbck.createznode.attempt.maxsleeptime",<a name="line.405"></a>
+<span class="sourceLineNo">406</span>            DEFAULT_CREATE_ZNODE_ATTEMPT_MAX_SLEEP_TIME));<a name="line.406"></a>
+<span class="sourceLineNo">407</span>  }<a name="line.407"></a>
+<span class="sourceLineNo">408</span><a name="line.408"></a>
+<span class="sourceLineNo">409</span>  /**<a name="line.409"></a>
+<span class="sourceLineNo">410</span>   * @return Return the tmp dir this tool writes too.<a name="line.410"></a>
+<span class="sourceLineNo">411</span>   */<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  @VisibleForTesting<a name="line.412"></a>
+<span class="sourceLineNo">413</span>  public static Path getTmpDir(Configuration conf) throws IOException {<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    return new Path(FSUtils.getRootDir(conf), HConstants.HBASE_TEMP_DIRECTORY);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>  }<a name="line.415"></a>
+<span class="sourceLineNo">416</span><a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private static class FileLockCallable implements Callable&lt;FSDataOutputStream&gt; {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>    RetryCounter retryCounter;<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    private final Configuration conf;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    private Path hbckLockPath = null;<a name="line.420"></a>
+<span class="sourceLineNo">421</span><a name="line.421"></a>
+<span class="sourceLineNo">422</span>    public FileLockCallable(Configuration conf, RetryCounter retryCounter) {<a name="line.422"></a>
+<span class="sourceLineNo">423</span>      this.retryCounter = retryCounter;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      this.conf = conf;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    /**<a name="line.427"></a>
+<span class="sourceLineNo">428</span>     * @return Will be &lt;code&gt;null&lt;/code&gt; unless you call {@link #call()}<a name="line.428"></a>
+<span class="sourceLineNo">429</span>     */<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    Path getHbckLockPath() {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      return this.hbckLockPath;<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    }<a name="line.432"></a>
+<span class="sourceLineNo">433</span><a name="line.433"></a>
+<span class="sourceLineNo">434</span>    @Override<a name="line.434"></a>
+<span class="sourceLineNo">435</span>    public FSDataOutputStream call() throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>      try {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>        FileSystem fs = FSUtils.getCurrentFileSystem(this.conf);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>        FsPermission defaultPerms = FSUtils.getFilePermissions(fs, this.conf,<a name="line.438"></a>
+<span class="sourceLineNo">439</span>            HConstants.DATA_FILE_UMASK_KEY);<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        Path tmpDir = getTmpDir(conf);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        this.hbckLockPath = new Path(tmpDir, HBCK_LOCK_FILE);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        fs.mkdirs(tmpDir);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        final FSDataOutputStream out = createFileWithRetries(fs, this.hbckLockPath, defaultPerms);<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        out.writeBytes(InetAddress.getLocalHost().toString());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>        // Add a note into the file we write on why hbase2 is writing out an hbck1 lock file.<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        out.writeBytes(" Written by an hbase-2.x Master to block an " +<a name="line.446"></a>
+<span class="sourceLineNo">447</span>            "attempt by an hbase-1.x HBCK tool making modification to state. " +<a name="line.447"></a>
+<span class="sourceLineNo">448</span>            "See 'HBCK must match HBase server version' in the hbase refguide.");<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        out.flush();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return out;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      } catch(RemoteException e) {<a name="line.451"></a>
+<span class="sourceLineNo">452</span>        if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          return null;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>        } else {<a name="line.454"></a>
+<span class="sourceLineNo">455</span>          throw e;<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        }<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    }<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>    private FSDataOutputStream createFileWithRetries(final FileSystem fs,<a name="line.460"></a>
+<span class="sourceLineNo">461</span>        final Path hbckLockFilePath, final FsPermission defaultPerms)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>        throws IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      IOException exception = null;<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      do {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        try {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>          return FSUtils.create(fs, hbckLockFilePath, defaultPerms, false);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>        } catch (IOException ioe) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>          LOG.info("Failed to create lock file " + hbckLockFilePath.getName()<a name="line.468"></a>
+<span class="sourceLineNo">469</span>              + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              + retryCounter.getMaxAttempts());<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),<a name="line.471"></a>
+<span class="sourceLineNo">472</span>              ioe);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          try {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>            exception = ioe;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>            retryCounter.sleepUntilNextRetry();<a name="line.475"></a>
+<span class="sourceLineNo">476</span>          } catch (InterruptedException ie) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>            throw (InterruptedIOException) new InterruptedIOException(<a name="line.477"></a>
+<span class="sourceLineNo">478</span>                "Can't create lock file " + hbckLockFilePath.getName())<a name="line.478"></a>
+<span class="sourceLineNo">479</span>            .initCause(ie);<a name="line.479"></a>
+<span class="sourceLineNo">480</span>          }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        }<a name="line.481"></a>
+<span class="sourceLineNo">482</span>      } while (retryCounter.shouldRetry());<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>      throw exception;<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    }<a name="line.485"></a>
+<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
+<span class="sourceLineNo">487</span><a name="line.487"></a>
+<span class="sourceLineNo">488</span>  /**<a name="line.488"></a>
+<span class="sourceLineNo">489</span>   * This method maintains a lock using a file. If the creation fails we return null<a name="line.489"></a>
+<span class="sourceLineNo">490</span>   *<a name="line.490"></a>
+<span class="sourceLineNo">491</span>   * @return FSDataOutputStream object corresponding to the newly opened lock file<a name="line.491"></a>
+<span class="sourceLineNo">492</span>   * @throws IOException if IO failure occurs<a name="line.492"></a>
+<span class="sourceLineNo">493</span>   */<a name="line.493"></a>
+<span class="sourceLineNo">494</span>  public static Pair&lt;Path, FSDataOutputStream&gt; checkAndMarkRunningHbck(Configuration conf,<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      RetryCounter retryCounter) throws IOException {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    FileLockCallable callable = new FileLockCallable(conf, retryCounter);<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    ExecutorService executor = Executors.newFixedThreadPool(1);<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    FutureTask&lt;FSDataOutputStream&gt; futureTask = new FutureTask&lt;&gt;(callable);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    executor.execute(futureTask);<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    final int timeoutInSeconds = conf.getInt(<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      "hbase.hbck.lockfile.maxwaittime", DEFAULT_WAIT_FOR_LOCK_TIMEOUT);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>    FSDataOutputStream stream = null;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    try {<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      stream = futureTask.get(timeoutInSeconds, TimeUnit.SECONDS);<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    } catch (ExecutionException ee) {<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.warn("Encountered exception when opening lock file", ee);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>    } catch (InterruptedException ie) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      LOG.warn("Interrupted when opening lock file", ie);<a name="line.508"></a>
+<span class="sourceLineNo">509</span>      Thread.currentThread().interrupt();<a name="line.509"></a>
+<span class="sourceLineNo">510</span>    } catch (TimeoutException exception) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>      // took too long to obtain lock<a name="line.511"></a>
+<span class="sourceLineNo">512</span>      LOG.warn("Took more than " + timeoutInSeconds + " seconds in obtaining lock");<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      futureTask.cancel(true);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>    } finally {<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      executor.shutdownNow();<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    }<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    return new Pair&lt;Path, FSDataOutputStream&gt;(callable.getHbckLockPath(), stream);<a name="line.517"></a>
+<span class="sourceLineNo">518</span>  }<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>  private void unlockHbck() {<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    if (isExclusive() &amp;&amp; hbckLockCleanup.compareAndSet(true, false)) {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      RetryCounter retryCounter = lockFileRetryCounterFactory.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      do {<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        try {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          IOUtils.closeQuietly(hbckOutFd);<a name="line.525"></a>
+<span class="sourceLineNo">

<TRUNCATED>
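
The HBaseFsck source shown above (before the truncation marker) bounds the blocking lock-file
creation with a FutureTask: checkAndMarkRunningHbck runs FileLockCallable on a single-thread
executor and waits at most hbase.hbck.lockfile.maxwaittime seconds (default 80) before cancelling.
Below is a minimal, self-contained sketch of that timeout-bounded pattern using only the JDK; the
LockFileSketch class name, local temp path and 5-second timeout are illustrative stand-ins, not
HBase's actual HDFS-based implementation.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/** Sketch only: bound a potentially blocking lock-file create with a timeout. */
public class LockFileSketch {
  // Hypothetical local lock path; hbck itself writes hbase-hbck.lock under the HBase temp dir on HDFS.
  private static final Path LOCK =
      Paths.get(System.getProperty("java.io.tmpdir"), "demo-hbck.lock");

  public static void main(String[] args) throws InterruptedException {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // createFile fails if the file already exists, i.e. another instance already holds the lock.
    FutureTask<Path> task = new FutureTask<>(() -> Files.createFile(LOCK));
    executor.execute(task);
    try {
      Path held = task.get(5, TimeUnit.SECONDS); // hbck waits up to 80s for the HDFS create
      System.out.println("Acquired lock file " + held);
    } catch (ExecutionException e) {
      System.err.println("Could not create lock file (already held?): " + e.getCause());
    } catch (TimeoutException e) {
      task.cancel(true); // took too long to obtain the lock; give up, as hbck does
      System.err.println("Timed out waiting for lock file");
    } finally {
      executor.shutdownNow();
    }
  }
}

On the cleanup side, unlockHbck in the diff closes the stream and deletes the lock path with a
RetryCounter; the sketch's equivalent would be a Files.deleteIfExists(LOCK) call, for example from
a shutdown hook.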

[32/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
index 7b680e9..a730a53 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/MasterMetaBootstrap.html
@@ -73,55 +73,54 @@
 <span class="sourceLineNo">065</span>      throw new IllegalStateException("hbase:meta must be initialized first before we can " +<a name="line.65"></a>
 <span class="sourceLineNo">066</span>          "assign out its replicas");<a name="line.66"></a>
 <span class="sourceLineNo">067</span>    }<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    ServerName metaServername =<a name="line.68"></a>
-<span class="sourceLineNo">069</span>        this.master.getMetaTableLocator().getMetaRegionLocation(this.master.getZooKeeper());<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    for (int i = 1; i &lt; numReplicas; i++) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      // Get current meta state for replica from zk.<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      RegionState metaState = MetaTableLocator.getMetaRegionState(master.getZooKeeper(), i);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      RegionInfo hri = RegionReplicaUtil.getRegionInfoForReplica(<a name="line.73"></a>
-<span class="sourceLineNo">074</span>          RegionInfoBuilder.FIRST_META_REGIONINFO, i);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      LOG.debug(hri.getRegionNameAsString() + " replica region state from zookeeper=" + metaState);<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      if (metaServername.equals(metaState.getServerName())) {<a name="line.76"></a>
-<span class="sourceLineNo">077</span>        metaState = null;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>        LOG.info(hri.getRegionNameAsString() +<a name="line.78"></a>
-<span class="sourceLineNo">079</span>          " old location is same as current hbase:meta location; setting location as null...");<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      }<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      // These assigns run inline. All is blocked till they complete. Only interrupt is shutting<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      // down hosting server which calls AM#stop.<a name="line.82"></a>
-<span class="sourceLineNo">083</span>      if (metaState != null &amp;&amp; metaState.getServerName() != null) {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>        // Try to retain old assignment.<a name="line.84"></a>
-<span class="sourceLineNo">085</span>        assignmentManager.assign(hri, metaState.getServerName());<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      } else {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>        assignmentManager.assign(hri);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      }<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    unassignExcessMetaReplica(numReplicas);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  }<a name="line.91"></a>
-<span class="sourceLineNo">092</span><a name="line.92"></a>
-<span class="sourceLineNo">093</span>  private void unassignExcessMetaReplica(int numMetaReplicasConfigured) {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    final ZKWatcher zooKeeper = master.getZooKeeper();<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // unassign the unneeded replicas (for e.g., if the previous master was configured<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    // with a replication of 3 and now it is 2, we need to unassign the 1 unneeded replica)<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    try {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>      List&lt;String&gt; metaReplicaZnodes = zooKeeper.getMetaReplicaNodes();<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      for (String metaReplicaZnode : metaReplicaZnodes) {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>        int replicaId = zooKeeper.getZNodePaths().getMetaReplicaIdFromZnode(metaReplicaZnode);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>        if (replicaId &gt;= numMetaReplicasConfigured) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>          RegionState r = MetaTableLocator.getMetaRegionState(zooKeeper, replicaId);<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          LOG.info("Closing excess replica of meta region " + r.getRegion());<a name="line.103"></a>
-<span class="sourceLineNo">104</span>          // send a close and wait for a max of 30 seconds<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          ServerManager.closeRegionSilentlyAndWait(master.getClusterConnection(),<a name="line.105"></a>
-<span class="sourceLineNo">106</span>              r.getServerName(), r.getRegion(), 30000);<a name="line.106"></a>
-<span class="sourceLineNo">107</span>          ZKUtil.deleteNode(zooKeeper, zooKeeper.getZNodePaths().getZNodeForReplica(replicaId));<a name="line.107"></a>
-<span class="sourceLineNo">108</span>        }<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      }<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    } catch (Exception ex) {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      // ignore the exception since we don't want the master to be wedged due to potential<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      // issues in the cleanup of the extra regions. We can do that cleanup via hbck or manually<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      LOG.warn("Ignoring exception " + ex);<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    }<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  }<a name="line.115"></a>
-<span class="sourceLineNo">116</span>}<a name="line.116"></a>
+<span class="sourceLineNo">068</span>    ServerName metaServername = MetaTableLocator.getMetaRegionLocation(this.master.getZooKeeper());<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    for (int i = 1; i &lt; numReplicas; i++) {<a name="line.69"></a>
+<span class="sourceLineNo">070</span>      // Get current meta state for replica from zk.<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      RegionState metaState = MetaTableLocator.getMetaRegionState(master.getZooKeeper(), i);<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      RegionInfo hri = RegionReplicaUtil.getRegionInfoForReplica(<a name="line.72"></a>
+<span class="sourceLineNo">073</span>          RegionInfoBuilder.FIRST_META_REGIONINFO, i);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      LOG.debug(hri.getRegionNameAsString() + " replica region state from zookeeper=" + metaState);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      if (metaServername.equals(metaState.getServerName())) {<a name="line.75"></a>
+<span class="sourceLineNo">076</span>        metaState = null;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>        LOG.info(hri.getRegionNameAsString() +<a name="line.77"></a>
+<span class="sourceLineNo">078</span>          " old location is same as current hbase:meta location; setting location as null...");<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      }<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      // These assigns run inline. All is blocked till they complete. Only interrupt is shutting<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      // down hosting server which calls AM#stop.<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      if (metaState != null &amp;&amp; metaState.getServerName() != null) {<a name="line.82"></a>
+<span class="sourceLineNo">083</span>        // Try to retain old assignment.<a name="line.83"></a>
+<span class="sourceLineNo">084</span>        assignmentManager.assign(hri, metaState.getServerName());<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      } else {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>        assignmentManager.assign(hri);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      }<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    }<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    unassignExcessMetaReplica(numReplicas);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>  private void unassignExcessMetaReplica(int numMetaReplicasConfigured) {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    final ZKWatcher zooKeeper = master.getZooKeeper();<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    // unassign the unneeded replicas (for e.g., if the previous master was configured<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    // with a replication of 3 and now it is 2, we need to unassign the 1 unneeded replica)<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    try {<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      List&lt;String&gt; metaReplicaZnodes = zooKeeper.getMetaReplicaNodes();<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      for (String metaReplicaZnode : metaReplicaZnodes) {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>        int replicaId = zooKeeper.getZNodePaths().getMetaReplicaIdFromZnode(metaReplicaZnode);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>        if (replicaId &gt;= numMetaReplicasConfigured) {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>          RegionState r = MetaTableLocator.getMetaRegionState(zooKeeper, replicaId);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>          LOG.info("Closing excess replica of meta region " + r.getRegion());<a name="line.102"></a>
+<span class="sourceLineNo">103</span>          // send a close and wait for a max of 30 seconds<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          ServerManager.closeRegionSilentlyAndWait(master.getClusterConnection(),<a name="line.104"></a>
+<span class="sourceLineNo">105</span>              r.getServerName(), r.getRegion(), 30000);<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          ZKUtil.deleteNode(zooKeeper, zooKeeper.getZNodePaths().getZNodeForReplica(replicaId));<a name="line.106"></a>
+<span class="sourceLineNo">107</span>        }<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      }<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    } catch (Exception ex) {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      // ignore the exception since we don't want the master to be wedged due to potential<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      // issues in the cleanup of the extra regions. We can do that cleanup via hbck or manually<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      LOG.warn("Ignoring exception " + ex);<a name="line.112"></a>
+<span class="sourceLineNo">113</span>    }<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
+<span class="sourceLineNo">115</span>}<a name="line.115"></a>
 
 
 


[36/51] [partial] hbase-site git commit: Published site at 8bf966c8e936dec4d83bcbe85c5aab543f14a0df.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27555316/devapidocs/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html b/devapidocs/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html
index cd0b50b..8167077 100644
--- a/devapidocs/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html
@@ -18,8 +18,8 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":9,"i14":9,"i15":10,"i16":10,"i17":9,"i18":9,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -110,20 +110,17 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.78">MetaTableLocator</a>
+public final class <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.58">MetaTableLocator</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
-<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper
- which keeps hbase:meta region server location.
-
- Stateless class with a bunch of static methods. Doesn't manage resources passed in
- (e.g. Connection, ZKWatcher etc).
-
- Meta region location is set by <code>RegionServerServices</code>.
- This class doesn't use ZK watchers, rather accesses ZK directly.
-
- This class it stateless. The only reason it's not made a non-instantiable util class
- with a collection of static methods is that it'd be rather hard to mock properly in tests.
-
+<div class="block">Utility class to perform operation (get/wait for/verify/set/delete) on znode in ZooKeeper which
+ keeps hbase:meta region server location.
+ <p/>
+ Stateless class with a bunch of static methods. Doesn't manage resources passed in (e.g.
+ Connection, ZKWatcher etc).
+ <p/>
+ Meta region location is set by <code>RegionServerServices</code>. This class doesn't use ZK
+ watchers, rather accesses ZK directly.
+ <p/>
  TODO: rewrite using RPC calls to master to find out about hbase:meta.</div>
 </li>
 </ul>
@@ -147,10 +144,6 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private boolean</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#stopped">stopped</a></span></code>&nbsp;</td>
-</tr>
 </table>
 </li>
 </ul>
@@ -163,10 +156,12 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
 <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
-<th class="colOne" scope="col">Constructor and Description</th>
+<th class="colFirst" scope="col">Modifier</th>
+<th class="colLast" scope="col">Constructor and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#MetaTableLocator--">MetaTableLocator</a></span>()</code>&nbsp;</td>
+<td class="colFirst"><code>private </code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#MetaTableLocator--">MetaTableLocator</a></span>()</code>&nbsp;</td>
 </tr>
 </table>
 </li>
@@ -178,13 +173,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </a>
 <h3>Method Summary</h3>
 <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
+<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
 <tr>
 <th class="colFirst" scope="col">Modifier and Type</th>
 <th class="colLast" scope="col">Method and Description</th>
 </tr>
 <tr id="i0" class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">blockUntilAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    int&nbsp;replicaId,
                    long&nbsp;timeout)</code>
@@ -192,14 +187,14 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </td>
 </tr>
 <tr id="i1" class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">blockUntilAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    long&nbsp;timeout)</code>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
 </td>
 </tr>
 <tr id="i2" class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;</code></td>
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-org.apache.hadoop.conf.Configuration-">blockUntilAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                    long&nbsp;timeout,
                    org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
@@ -207,92 +202,77 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </td>
 </tr>
 <tr id="i3" class="rowColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">deleteMetaLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper)</code>
 <div class="block">Deletes the location of <code>hbase:meta</code> in ZooKeeper.</div>
 </td>
 </tr>
 <tr id="i4" class="altColor">
-<td class="colFirst"><code>void</code></td>
+<td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#deleteMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">deleteMetaLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                   int&nbsp;replicaId)</code>&nbsp;</td>
 </tr>
 <tr id="i5" class="rowColor">
-<td class="colFirst"><code>private static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">getCachedConnection</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                   <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)</code>&nbsp;</td>
-</tr>
-<tr id="i6" class="altColor">
-<td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getListOfRegionInfos-java.util.List-">getListOfRegionInfos</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</code>&nbsp;</td>
 </tr>
-<tr id="i7" class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<tr id="i6" class="altColor">
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Gets the meta region location, if available.</div>
 </td>
 </tr>
-<tr id="i8" class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<tr id="i7" class="rowColor">
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                      int&nbsp;replicaId)</code>
 <div class="block">Gets the meta region location, if available.</div>
 </td>
 </tr>
-<tr id="i9" class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<tr id="i8" class="altColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegions</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Gets the meta regions for the given path with the default replica ID.</div>
 </td>
 </tr>
-<tr id="i10" class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
+<tr id="i9" class="rowColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegions-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegions</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
               int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions for the given path and replica ID.</div>
 </td>
 </tr>
-<tr id="i11" class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<tr id="i10" class="altColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
+<tr id="i11" class="rowColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionsAndLocations-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionsAndLocations</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                           int&nbsp;replicaId)</code>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
 </td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/master/RegionState.html" title="class in org.apache.hadoop.hbase.master">RegionState</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionState-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">getMetaRegionState</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Load the meta region state from the meta server ZNode.</div>
 </td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/master/RegionState.html" title="class in org.apache.hadoop.hbase.master">RegionState</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaRegionState-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-">getMetaRegionState</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                   int&nbsp;replicaId)</code>
 <div class="block">Load the meta region state from the meta server ZNode.</div>
 </td>
 </tr>
-<tr id="i15" class="rowColor">
-<td class="colFirst"><code>private org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">getMetaServerConnection</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                       <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                       long&nbsp;timeout,
-                       int&nbsp;replicaId)</code>
-<div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
- specified timeout for availability.</div>
-</td>
-</tr>
-<tr id="i16" class="altColor">
-<td class="colFirst"><code>boolean</code></td>
+<tr id="i14" class="altColor">
+<td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#isLocationAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">isLocationAvailable</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
 <div class="block">Checks if the meta region location is available.</div>
 </td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-int-org.apache.hadoop.hbase.master.RegionState.State-">setMetaLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
@@ -301,7 +281,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Sets the location of <code>hbase:meta</code> in ZooKeeper to the specified server address.</div>
 </td>
 </tr>
-<tr id="i18" class="altColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-org.apache.hadoop.hbase.master.RegionState.State-">setMetaLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
@@ -310,47 +290,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  specified server address.</div>
 </td>
 </tr>
-<tr id="i19" class="rowColor">
-<td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#stop--">stop</a></span>()</code>
-<div class="block">Stop working.</div>
-</td>
-</tr>
-<tr id="i20" class="altColor">
-<td class="colFirst"><code>boolean</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">verifyMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
-                        <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                        long&nbsp;timeout)</code>
-<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
-</td>
-</tr>
-<tr id="i21" class="rowColor">
-<td class="colFirst"><code>boolean</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">verifyMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                        <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                        long&nbsp;timeout,
-                        int&nbsp;replicaId)</code>
-<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
-</td>
-</tr>
-<tr id="i22" class="altColor">
-<td class="colFirst"><code>private boolean</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">verifyRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                    org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
-                    <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
-                    byte[]&nbsp;regionName)</code>
-<div class="block">Verify we can connect to <code>hostingServer</code> and that its carrying
- <code>regionName</code>.</div>
-</td>
-</tr>
-<tr id="i23" class="rowColor">
-<td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</code>
-<div class="block">Waits indefinitely for availability of <code>hbase:meta</code>.</div>
-</td>
-</tr>
-<tr id="i24" class="altColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<tr id="i17" class="rowColor">
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       int&nbsp;replicaId,
                       long&nbsp;timeout)</code>
@@ -358,12 +299,12 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
  immediately available.</div>
 </td>
 </tr>
-<tr id="i25" class="rowColor">
-<td class="colFirst"><code><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
+<tr id="i18" class="altColor">
+<td class="colFirst"><code>static <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">waitMetaRegionLocation</a></span>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                       long&nbsp;timeout)</code>
-<div class="block">Gets the meta region location, if available, and waits for up to the
- specified timeout if not immediately available.</div>
+<div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
+ immediately available.</div>
 </td>
 </tr>
 </table>
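Per the revised method summary above, every public operation on MetaTableLocator is now static, so callers no longer construct (or stop) an instance; the removed stop() and verifyMetaRegionLocation() members disappear from the table entirely. A minimal, hedged sketch of the new call pattern, where MetaLocationExample and locateMeta() are hypothetical names and the ZKWatcher is assumed to be supplied by the caller:

import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public final class MetaLocationExample {
  private MetaLocationExample() {
  }

  /**
   * Returns the server hosting the default hbase:meta replica, waiting up to timeoutMs
   * if the location has not yet been published to ZooKeeper (may still return null).
   */
  public static ServerName locateMeta(ZKWatcher zkw, long timeoutMs)
      throws InterruptedException, NotAllMetaRegionsOnlineException {
    if (MetaTableLocator.isLocationAvailable(zkw)) {
      // Non-blocking read of the meta location znode.
      return MetaTableLocator.getMetaRegionLocation(zkw);
    }
    // Blocks until the location appears or the timeout elapses.
    return MetaTableLocator.waitMetaRegionLocation(zkw, timeoutMs);
  }
}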
@@ -391,19 +332,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="LOG">
 <!--   -->
 </a>
-<ul class="blockList">
-<li class="blockList">
-<h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.79">LOG</a></pre>
-</li>
-</ul>
-<a name="stopped">
-<!--   -->
-</a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>stopped</h4>
-<pre>private volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.82">stopped</a></pre>
+<h4>LOG</h4>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.59">LOG</a></pre>
 </li>
 </ul>
 </li>
@@ -420,7 +352,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MetaTableLocator</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.78">MetaTableLocator</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.61">MetaTableLocator</a>()</pre>
 </li>
 </ul>
 </li>
@@ -437,7 +369,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isLocationAvailable</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.88">isLocationAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.68">isLocationAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
 <div class="block">Checks if the meta region location is available.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -451,7 +383,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegionsAndLocations</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.96">getMetaRegionsAndLocations</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.76">getMetaRegionsAndLocations</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>zkw</code> - ZooKeeper watcher to be used</dd>
@@ -466,8 +398,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegionsAndLocations</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.107">getMetaRegionsAndLocations</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                                                    int&nbsp;replicaId)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.87">getMetaRegionsAndLocations</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                                                           int&nbsp;replicaId)</pre>
 <div class="block">Gets the meta regions and their locations for the given path and replica ID.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -484,7 +416,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegions</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.122">getMetaRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.102">getMetaRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
 <div class="block">Gets the meta regions for the given path with the default replica ID.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -500,8 +432,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegions</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.133">getMetaRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                       int&nbsp;replicaId)</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.112">getMetaRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                              int&nbsp;replicaId)</pre>
 <div class="block">Gets the meta regions for the given path and replica ID.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -518,7 +450,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getListOfRegionInfos</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.139">getListOfRegionInfos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.118">getListOfRegionInfos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&gt;&nbsp;pairs)</pre>
 </li>
 </ul>
 <a name="getMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">
@@ -527,7 +459,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegionLocation</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.156">getMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.136">getMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)</pre>
 <div class="block">Gets the meta region location, if available.  Does not block.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -543,8 +475,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegionLocation</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.171">getMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                        int&nbsp;replicaId)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.151">getMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                               int&nbsp;replicaId)</pre>
 <div class="block">Gets the meta region location, if available.  Does not block.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -561,21 +493,20 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>waitMetaRegionLocation</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.193">waitMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                         long&nbsp;timeout)
-                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
-                                         <a href="../../../../../org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.html" title="class in org.apache.hadoop.hbase">NotAllMetaRegionsOnlineException</a></pre>
-<div class="block">Gets the meta region location, if available, and waits for up to the
- specified timeout if not immediately available.
- Given the zookeeper notification could be delayed, we will try to
- get the latest data.</div>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.171">waitMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                                long&nbsp;timeout)
+                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
+                                                <a href="../../../../../org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.html" title="class in org.apache.hadoop.hbase">NotAllMetaRegionsOnlineException</a></pre>
+<div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
+ immediately available. Given the zookeeper notification could be delayed, we will try to get
+ the latest data.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
 <dd><code>timeout</code> - maximum time to wait, in millis</dd>
 <dt><span class="returnLabel">Returns:</span></dt>
-<dd>server name for server hosting meta region formatted as per
- <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase"><code>ServerName</code></a>, or null if none available</dd>
+<dd>server name for server hosting meta region formatted as per <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase"><code>ServerName</code></a>, or null
+         if none available</dd>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if interrupted while waiting</dd>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.html" title="class in org.apache.hadoop.hbase">NotAllMetaRegionsOnlineException</a></code> - if a meta or root region is not online</dd>
@@ -588,181 +519,35 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>waitMetaRegionLocation</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.211">waitMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                         int&nbsp;replicaId,
-                                         long&nbsp;timeout)
-                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
-                                         <a href="../../../../../org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.html" title="class in org.apache.hadoop.hbase">NotAllMetaRegionsOnlineException</a></pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.188">waitMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                                int&nbsp;replicaId,
+                                                long&nbsp;timeout)
+                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
+                                                <a href="../../../../../org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.html" title="class in org.apache.hadoop.hbase">NotAllMetaRegionsOnlineException</a></pre>
 <div class="block">Gets the meta region location, if available, and waits for up to the specified timeout if not
- immediately available. Given the zookeeper notification could be delayed, we will try to
- get the latest data.</div>
+ immediately available. Given the zookeeper notification could be delayed, we will try to get
+ the latest data.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
 <dd><code>replicaId</code> - the ID of the replica</dd>
 <dd><code>timeout</code> - maximum time to wait, in millis</dd>
 <dt><span class="returnLabel">Returns:</span></dt>
-<dd>server name for server hosting meta region formatted as per
- <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase"><code>ServerName</code></a>, or null if none available</dd>
+<dd>server name for server hosting meta region formatted as per <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase"><code>ServerName</code></a>, or null
+         if none available</dd>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.html" title="class in org.apache.hadoop.hbase">NotAllMetaRegionsOnlineException</a></code> - if a meta or root region is not online</dd>
 </dl>
 </li>
 </ul>
-<a name="waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>waitMetaRegionLocation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.239">waitMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)
-                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
-<div class="block">Waits indefinitely for availability of <code>hbase:meta</code>.  Used during
- cluster startup.  Does not verify meta, just that something has been
- set up in zk.</div>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if interrupted while waiting</dd>
-<dt><span class="seeLabel">See Also:</span></dt>
-<dd><a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#waitMetaRegionLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-"><code>waitMetaRegionLocation(ZKWatcher, long)</code></a></dd>
-</dl>
-</li>
-</ul>
-<a name="verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>verifyMetaRegionLocation</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.272">verifyMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;hConnection,
-                                        <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                        long&nbsp;timeout)
-                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
-                                        <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
-<dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>hConnection</code> - the connection to use</dd>
-<dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
-<dd><code>timeout</code> - How long to wait on zk for meta address (passed through to
-                the internal call to <a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-"><code>getMetaServerConnection(org.apache.hadoop.hbase.client.ClusterConnection, org.apache.hadoop.hbase.zookeeper.ZKWatcher, long, int)</code></a>.</dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>True if the <code>hbase:meta</code> location is healthy.</dd>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
-</dl>
-</li>
-</ul>
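A hedged sketch of the verification call as it existed before this change; the cast from the public Connection to the internal ClusterConnection interface and the 30-second timeout are assumptions for illustration only.
<pre>
// Sketch only: pre-change instance API; connection wiring is assumed.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public class VerifyMetaExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (ClusterConnection conn = (ClusterConnection) ConnectionFactory.createConnection(conf);
         ZKWatcher zkw = new ZKWatcher(conf, "verify-meta-example", null)) {
      // True only if hbase:meta is assigned and its hosting server is reachable.
      boolean healthy = new MetaTableLocator().verifyMetaRegionLocation(conn, zkw, 30000L);
      System.out.println("hbase:meta deployed and reachable: " + healthy);
    }
  }
}
</pre>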
-<a name="verifyMetaRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>verifyMetaRegionLocation</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.288">verifyMetaRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                                        <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                        long&nbsp;timeout,
-                                        int&nbsp;replicaId)
-                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
-                                        <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Verify <code>hbase:meta</code> is deployed and accessible.</div>
-<dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>connection</code> - the connection to use</dd>
-<dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
-<dd><code>timeout</code> - How long to wait on zk for meta address (passed through to the internal call to <a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-"><code>getMetaServerConnection(org.apache.hadoop.hbase.client.ClusterConnection, org.apache.hadoop.hbase.zookeeper.ZKWatcher, long, int)</code></a>).</dd>
-<dd><code>replicaId</code> - the ID of the replica</dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>True if the <code>hbase:meta</code> location is healthy.</dd>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
-</dl>
-</li>
-</ul>
-<a name="verifyRegionLocation-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface-org.apache.hadoop.hbase.ServerName-byte:A-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>verifyRegionLocation</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.320">verifyRegionLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                                     org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;hostingServer,
-                                     <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;address,
-                                     byte[]&nbsp;regionName)</pre>
-<div class="block">Verify we can connect to <code>hostingServer</code> and that its carrying
- <code>regionName</code>.</div>
-<dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>hostingServer</code> - Interface to the server hosting <code>regionName</code></dd>
-<dd><code>address</code> - The servername that goes with the <code>hostingServer</code> interface.
-                Used for logging.</dd>
-<dd><code>regionName</code> - The region name we are interested in.</dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>True if we were able to verify the region located at other side of the interface.</dd>
-</dl>
-</li>
-</ul>
-<a name="getMetaServerConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.zookeeper.ZKWatcher-long-int-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>getMetaServerConnection</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.369">getMetaServerConnection</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                                                                                                                             <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                                                                                                             long&nbsp;timeout,
-                                                                                                                             int&nbsp;replicaId)
-                                                                                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
-                                                                                                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Gets a connection to the server hosting meta, as reported by ZooKeeper, waiting up to the
- specified timeout for availability.
-
- <p>WARNING: Does not retry.  Use an <a href="../../../../../org/apache/hadoop/hbase/client/HTable.html" title="class in org.apache.hadoop.hbase.client"><code>HTable</code></a> instead.</div>
-<dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>connection</code> - the connection to use</dd>
-<dd><code>zkw</code> - reference to the <a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>ZKWatcher</code></a> which also contains configuration and operation</dd>
-<dd><code>timeout</code> - How long to wait on meta location</dd>
-<dd><code>replicaId</code> - the ID of the replica</dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>connection to server hosting meta</dd>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></code> - if waiting for the socket operation fails</dd>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
-</dl>
-</li>
-</ul>
-<a name="getCachedConnection-org.apache.hadoop.hbase.client.ClusterConnection-org.apache.hadoop.hbase.ServerName-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>getCachedConnection</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.381">getCachedConnection</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection,
-                                                                                                                                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;sn)
-                                                                                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>sn</code> - ServerName to get a connection against.</dd>
-<dt><span class="returnLabel">Returns:</span></dt>
-<dd>The AdminProtocol we got when we connected to <code>sn</code>.
-         It may have come from the cache, may not be good, may have been set up by this invocation, or
-         may be null.</dd>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code> - if the number of retries for getting the connection is exceeded</dd>
-</dl>
-</li>
-</ul>
 <a name="setMetaLocation-org.apache.hadoop.hbase.zookeeper.ZKWatcher-org.apache.hadoop.hbase.ServerName-org.apache.hadoop.hbase.master.RegionState.State-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>setMetaLocation</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.432">setMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.217">setMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                                    <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                    <a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master">RegionState.State</a>&nbsp;state)
                             throws org.apache.zookeeper.KeeperException</pre>
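A minimal sketch of publishing a meta location with the static method shown above; the ServerName value is invented for illustration and the ZKWatcher is assumed to be supplied by the caller.
<pre>
// Sketch: record the hosting server and OPEN state in the meta znode.
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;

public final class PublishMetaLocation {
  static void publish(ZKWatcher zkw) throws KeeperException {
    // Example server name only; real callers pass the actual region server.
    ServerName sn = ServerName.valueOf("rs1.example.com", 16020, System.currentTimeMillis());
    MetaTableLocator.setMetaLocation(zkw, sn, RegionState.State.OPEN);
  }
}
</pre>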
@@ -784,7 +569,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setMetaLocation</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.446">setMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.231">setMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
                                    <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                                    int&nbsp;replicaId,
                                    <a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master">RegionState.State</a>&nbsp;state)
@@ -808,7 +593,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegionState</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.html" title="class in org.apache.hadoop.hbase.master">RegionState</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.479">getMetaRegionState</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.html" title="class in org.apache.hadoop.hbase.master">RegionState</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.264">getMetaRegionState</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw)
                                       throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Load the meta region state from the meta server ZNode.</div>
 <dl>
@@ -823,7 +608,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaRegionState</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.html" title="class in org.apache.hadoop.hbase.master">RegionState</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.491">getMetaRegionState</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.html" title="class in org.apache.hadoop.hbase.master">RegionState</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.276">getMetaRegionState</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
                                              int&nbsp;replicaId)
                                       throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Load the meta region state from the meta server ZNode.</div>
@@ -844,8 +629,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteMetaLocation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.535">deleteMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper)
-                        throws org.apache.zookeeper.KeeperException</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.320">deleteMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper)
+                               throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Deletes the location of <code>hbase:meta</code> in ZooKeeper.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -861,9 +646,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteMetaLocation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.540">deleteMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
-                               int&nbsp;replicaId)
-                        throws org.apache.zookeeper.KeeperException</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.325">deleteMetaLocation</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zookeeper,
+                                      int&nbsp;replicaId)
+                               throws org.apache.zookeeper.KeeperException</pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code>org.apache.zookeeper.KeeperException</code></dd>
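A hedged sketch of clearing the published location with the static variant above; whether and how to retry on KeeperException is left to the caller, and the wrapping shown here is only one option.
<pre>
// Sketch: delete the hbase:meta location znode (primary replica).
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.zookeeper.KeeperException;

public final class ClearMetaLocation {
  static void clear(ZKWatcher zkw) {
    try {
      MetaTableLocator.deleteMetaLocation(zkw);
    } catch (KeeperException e) {
      // Illustrative handling only; callers may prefer to retry.
      throw new RuntimeException("Failed to delete hbase:meta location znode", e);
    }
  }
}
</pre>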
@@ -876,10 +661,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>blockUntilAvailable</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.564">blockUntilAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                            long&nbsp;timeout,
-                                            org.apache.hadoop.conf.Configuration&nbsp;conf)
-                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.349">blockUntilAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                                   long&nbsp;timeout,
+                                                   org.apache.hadoop.conf.Configuration&nbsp;conf)
+                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Wait until the primary meta region is available. Get the secondary locations as well but don't
  block for those.</div>
 <dl>
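A minimal sketch of waiting on the primary meta location with the static list variant shown above; the 60-second timeout is an assumption, and treating the first returned entry as the primary follows the description (block for the primary, collect secondaries if already published).
<pre>
// Sketch: block for the primary meta location, pick up secondaries if present.
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

public final class WaitForMetaLocations {
  static ServerName waitForPrimary(ZKWatcher zkw, Configuration conf) throws InterruptedException {
    List&lt;ServerName&gt; locations = MetaTableLocator.blockUntilAvailable(zkw, 60000L, conf);
    return locations.isEmpty() ? null : locations.get(0);
  }
}
</pre>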
@@ -900,9 +685,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>blockUntilAvailable</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.599">blockUntilAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                      long&nbsp;timeout)
-                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.384">blockUntilAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                             long&nbsp;timeout)
+                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -918,13 +703,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="blockUntilAvailable-org.apache.hadoop.hbase.zookeeper.ZKWatcher-int-long-">
 <!--   -->
 </a>
-<ul class="blockList">
+<ul class="blockListLast">
 <li class="blockList">
 <h4>blockUntilAvailable</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.613">blockUntilAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
-                                      int&nbsp;replicaId,
-                                      long&nbsp;timeout)
-                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.397">blockUntilAvailable</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;zkw,
+                                             int&nbsp;replicaId,
+                                             long&nbsp;timeout)
+                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Wait until the meta region is available and is not in transition.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -938,17 +723,6 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
-<a name="stop--">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html#line.640">stop</a>()</pre>
-<div class="block">Stop working.
- Interrupts any ongoing waits.</div>
-</li>
-</ul>
 </li>
 </ul>
 </li>